index (int64, 0 to 100k) | blob_id (string, length 40) | code (string, 7 to 7.27M chars) | steps (list, 1 to 1.25k items) | error (bool, 2 classes)
---|---|---|---|---|
800 |
026e06e777d64f8724ec5e89a7829b3a42a25d6b
|
from flask import Flask, request, redirect, url_for, render_template
from flask_modus import Modus
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
app = Flask(__name__)
app.config[
'SQLALCHEMY_DATABASE_URI'] = "postgres://localhost/flask_one_to_many"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_ECHO'] = True
modus = Modus(app)
db = SQLAlchemy(app)
Migrate(app, db)
class Student(db.Model):
__tablename__ = "students"
id = db.Column(db.Integer, primary_key=True)
first_name = db.Column(db.Text)
last_name = db.Column(db.Text)
excuses = db.relationship('Excuse', backref='student',
lazy='dynamic')
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
class Excuse(db.Model):
__tablename__ = "excuses"
id = db.Column(db.Integer, primary_key=True)
content = db.Column(db.Text)
is_believable = db.Column(db.Text)
student_id = db.Column(db.Integer, db.ForeignKey("students.id"))
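# Usage sketch (illustrative, not part of the original app): the relationship above
# gives each Student a dynamic `excuses` query and each Excuse a `student` backref,
# so, assuming a student with id 1 exists, one could write:
#     student = Student.query.get(1)
#     student.excuses.all()            # all Excuse rows belonging to this student
#     excuse = student.excuses.first()
#     excuse.student.first_name        # back to the owning Student via the backref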
@app.route('/')
def root():
return redirect(url_for('index'))
@app.route('/students', methods=["GET", "POST"])
def index():
if request.method == 'POST':
new_student = Student(request.form['first_name'],
request.form['last_name'])
db.session.add(new_student)
db.session.commit()
return redirect(url_for('index'))
return render_template('students/index.html', students=Student.query.all())
@app.route('/students/new')
def new():
return render_template('students/new.html')
@app.route('/students/<int:id>/edit')
def edit(id):
return render_template('students/edit.html', student=Student.query.get(id))
@app.route('/students/<int:id>', methods=["GET", "PATCH"])
def show(id):
found_student = Student.query.get(id)
if request.method == b'PATCH':
found_student.first_name = request.form['first_name']
found_student.last_name = request.form['last_name']
db.session.add(found_student)
db.session.commit()
return redirect(url_for('index'))
return render_template('students/show.html', student=found_student)
@app.route("/students/<int:id>/excuses", methods = ["GET", "POST"])
def excuses_index(id):
found_student = Student.query.get(id)
if request.method == "POST":
new_excuse = Excuse(content=request.form.get("content"), is_believable=request.form.get("is_believable"), student_id=id)
db.session.add(new_excuse)
db.session.commit()
return redirect(url_for("excuses_index", id = id))
excuses_list = found_student.excuses.all()
return render_template("excuses/index.html", excuses=excuses_list, student= found_student)
@app.route("/students/<int:id>/excuses/new")
def new_excuse(id):
return render_template("/excuses/new.html", id = id)
@app.route("/students/<int:id>/excuses/<int:excuse_id>/edit", methods = ["GET", "PATCH","DELETE"])
def edit_excuse(id,excuse_id):
print(id)
found_student = Student.query.get(id)
found_excuse = Excuse.query.get(excuse_id)
excuses_list = found_student.excuses.all()
if request.method == b'DELETE':
db.session.delete(found_excuse)
db.session.commit()
return redirect(url_for('excuses_index', id = found_student.id))
elif request.method == b"PATCH":
found_excuse.content = request.form.get("content")
found_excuse.is_believable = request.form.get("is_believable")
db.session.add(found_excuse)
db.session.commit()
return redirect(url_for("excuses_index", id = found_student.id))
# return render_template("excuses/index.html",excuses = excuses_list, student = found_student)
return render_template("excuses/edit.html",excuse = found_excuse, student = found_student)
|
[
"from flask import Flask, request, redirect, url_for, render_template\nfrom flask_modus import Modus\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_migrate import Migrate\n\napp = Flask(__name__)\napp.config[\n 'SQLALCHEMY_DATABASE_URI'] = \"postgres://localhost/flask_one_to_many\"\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['SQLALCHEMY_ECHO'] = True\nmodus = Modus(app)\ndb = SQLAlchemy(app)\nMigrate(app, db)\n\n\nclass Student(db.Model):\n\n __tablename__ = \"students\"\n\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student',\n lazy='dynamic')\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\nclass Excuse(db.Model):\n __tablename__ = \"excuses\"\n\n id = db.Column(db.Integer, primary_key = True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey(\"students.id\"))\n \n\n\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=[\"GET\", \"POST\"])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'],\n request.form['last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\[email protected]('/students/<int:id>/edit')\ndef edit(id):\n return render_template('students/edit.html', student=Student.query.get(id))\n\n\[email protected]('/students/<int:id>', methods=[\"GET\", \"PATCH\"])\ndef show(id):\n found_student = Student.query.get(id)\n if request.method == b'PATCH':\n found_student.first_name = request.form['first_name']\n found_student.last_name = request.form['last_name']\n db.session.add(found_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/show.html', student=found_student)\n\[email protected](\"/students/<int:id>/excuses\", methods = [\"GET\", \"POST\"])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == \"POST\":\n new_excuse = Excuse(content = request.form.get(\"content\"), is_believable = request.form.get(\"is_believable\"), student_id = id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for(\"excuses_index\", id = id))\n\n \n excuses_list = found_student.excuses.all()\n return render_template(\"excuses/index.html\", excuses=excuses_list, student= found_student)\n\n\n\[email protected](\"/students/<int:id>/excuses/new\")\ndef new_excuse(id):\n\n return render_template(\"/excuses/new.html\", id = id)\n\n\[email protected](\"/students/<int:id>/excuses/<int:excuse_id>/edit\", methods = [\"GET\", \"PATCH\",\"DELETE\"])\ndef edit_excuse(id,excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id = found_student.id))\n elif request.method == b\"PATCH\":\n \n found_excuse.content = request.form.get(\"content\")\n found_excuse.is_believable = request.form.get(\"is_believable\")\n \n db.session.add(found_excuse)\n db.session.commit()\n\n return 
redirect(url_for(\"excuses_index\", id = found_student.id))\n \n # return render_template(\"excuses/index.html\",excuses = excuses_list, student = found_student)\n return render_template(\"excuses/edit.html\",excuse = found_excuse, student = found_student)",
"from flask import Flask, request, redirect, url_for, render_template\nfrom flask_modus import Modus\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_migrate import Migrate\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'\n ] = 'postgres://localhost/flask_one_to_many'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['SQLALCHEMY_ECHO'] = True\nmodus = Modus(app)\ndb = SQLAlchemy(app)\nMigrate(app, db)\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\[email protected]('/students/<int:id>/edit')\ndef edit(id):\n return render_template('students/edit.html', student=Student.query.get(id))\n\n\[email protected]('/students/<int:id>', methods=['GET', 'PATCH'])\ndef show(id):\n found_student = Student.query.get(id)\n if request.method == b'PATCH':\n found_student.first_name = request.form['first_name']\n found_student.last_name = request.form['last_name']\n db.session.add(found_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/show.html', student=found_student)\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\[email protected]('/students/<int:id>/excuses/new')\ndef new_excuse(id):\n return render_template('/excuses/new.html', id=id)\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', 
excuse=found_excuse,\n student=found_student)\n",
"<import token>\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'\n ] = 'postgres://localhost/flask_one_to_many'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['SQLALCHEMY_ECHO'] = True\nmodus = Modus(app)\ndb = SQLAlchemy(app)\nMigrate(app, db)\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\[email protected]('/students/<int:id>/edit')\ndef edit(id):\n return render_template('students/edit.html', student=Student.query.get(id))\n\n\[email protected]('/students/<int:id>', methods=['GET', 'PATCH'])\ndef show(id):\n found_student = Student.query.get(id)\n if request.method == b'PATCH':\n found_student.first_name = request.form['first_name']\n found_student.last_name = request.form['last_name']\n db.session.add(found_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/show.html', student=found_student)\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\[email protected]('/students/<int:id>/excuses/new')\ndef new_excuse(id):\n return render_template('/excuses/new.html', id=id)\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\nMigrate(app, db)\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\[email protected]('/students/<int:id>/edit')\ndef edit(id):\n return render_template('students/edit.html', student=Student.query.get(id))\n\n\[email protected]('/students/<int:id>', methods=['GET', 'PATCH'])\ndef show(id):\n found_student = Student.query.get(id)\n if request.method == b'PATCH':\n found_student.first_name = request.form['first_name']\n found_student.last_name = request.form['last_name']\n db.session.add(found_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/show.html', student=found_student)\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\[email protected]('/students/<int:id>/excuses/new')\ndef new_excuse(id):\n return render_template('/excuses/new.html', id=id)\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\[email protected]('/students/<int:id>/edit')\ndef edit(id):\n return render_template('students/edit.html', student=Student.query.get(id))\n\n\[email protected]('/students/<int:id>', methods=['GET', 'PATCH'])\ndef show(id):\n found_student = Student.query.get(id)\n if request.method == b'PATCH':\n found_student.first_name = request.form['first_name']\n found_student.last_name = request.form['last_name']\n db.session.add(found_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/show.html', student=found_student)\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\[email protected]('/students/<int:id>/excuses/new')\ndef new_excuse(id):\n return render_template('/excuses/new.html', id=id)\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\[email protected]('/students/<int:id>/edit')\ndef edit(id):\n return render_template('students/edit.html', student=Student.query.get(id))\n\n\n<function token>\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\[email protected]('/students/<int:id>/excuses/new')\ndef new_excuse(id):\n return render_template('/excuses/new.html', id=id)\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\[email protected]('/students/<int:id>/edit')\ndef edit(id):\n return render_template('students/edit.html', student=Student.query.get(id))\n\n\n<function token>\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\n<function token>\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\[email protected]('/')\ndef root():\n return redirect(url_for('index'))\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\n<function token>\n<function token>\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\n<function token>\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\n<function token>\n<function token>\n\n\[email protected]('/students/<int:id>/excuses', methods=['GET', 'POST'])\ndef excuses_index(id):\n found_student = Student.query.get(id)\n if request.method == 'POST':\n new_excuse = Excuse(content=request.form.get('content'),\n is_believable=request.form.get('is_believable'), student_id=id)\n db.session.add(new_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=id))\n excuses_list = found_student.excuses.all()\n return render_template('excuses/index.html', excuses=excuses_list,\n student=found_student)\n\n\n<function token>\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/students/<int:id>/excuses/<int:excuse_id>/edit', methods=[\n 'GET', 'PATCH', 'DELETE'])\ndef edit_excuse(id, excuse_id):\n print(id)\n found_student = Student.query.get(id)\n found_excuse = Excuse.query.get(excuse_id)\n excuses_list = found_student.excuses.all()\n if request.method == b'DELETE':\n db.session.delete(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n elif request.method == b'PATCH':\n found_excuse.content = request.form.get('content')\n found_excuse.is_believable = request.form.get('is_believable')\n db.session.add(found_excuse)\n db.session.commit()\n return redirect(url_for('excuses_index', id=found_student.id))\n return render_template('excuses/edit.html', excuse=found_excuse,\n student=found_student)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n\n\[email protected]('/students', methods=['GET', 'POST'])\ndef index():\n if request.method == 'POST':\n new_student = Student(request.form['first_name'], request.form[\n 'last_name'])\n db.session.add(new_student)\n db.session.commit()\n return redirect(url_for('index'))\n return render_template('students/index.html', students=Student.query.all())\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n<function token>\n\n\[email protected]('/students/new')\ndef new():\n return render_template('students/new.html')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n __tablename__ = 'students'\n id = db.Column(db.Integer, primary_key=True)\n first_name = db.Column(db.Text)\n last_name = db.Column(db.Text)\n excuses = db.relationship('Excuse', backref='student', lazy='dynamic')\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, first_name, last_name):\n self.first_name = first_name\n self.last_name = last_name\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass Student(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<code token>\n<class token>\n\n\nclass Excuse(db.Model):\n __tablename__ = 'excuses'\n id = db.Column(db.Integer, primary_key=True)\n content = db.Column(db.Text)\n is_believable = db.Column(db.Text)\n student_id = db.Column(db.Integer, db.ForeignKey('students.id'))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<code token>\n<class token>\n\n\nclass Excuse(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<code token>\n<class token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
801 |
848934680253ff2950db7723b1fe82b2ae799900
|
# -*- coding: utf-8 -*-
"""
Nothing is perfect: errors and timeouts may happen, and when such failures happen, the
consumer has to decide what to do with them. By default, the consumer rejects the
envelope (RabbitMQ message) when a failure happens. However, error and timeout
issues, unless there is a software bug, are usually solved with retries. Just like
routing, the consumer doesn't make the retry decision itself; it delegates that
decision to a retry policy. The retry policy defines how the retry is performed.
Retries usually happen with back-offs to avoid worsening the situation by hammering
other services with more requests, especially if it was a timeout issue. The consumer
can be configured to use a retry policy by calling :meth:`.Consumer.set_retry_policy`,
passing an instance of :class:`.RetryPolicy`. When a retry policy is set, the consumer
won't reject messages; instead, it sends them to the retry policy to deal with the
situation by invoking the :meth:`.RetryPolicy.retry` method. Based on its
implementation, the retry policy decides how to perform retries.
There are 4 different retry policies available:
1. :class:`.UnlimitedRetriesPolicy`, Unlimited retries policy
2. :class:`.LimitedRetriesPolicy`, Limited retries policy
3. :class:`.FixedDelayUnlimitedRetriesPolicy`, Fixed delay unlimited retries policy
4. :class:`.FixedDelayLimitedRetriesPolicy`, Fixed delay limited retries policy
Custom retry policies can be created by implementing the base class
:class:`.RetryPolicy`
"""
import logging
logger = logging.getLogger(__name__)
class RetryPolicy(object):
"""Base class for retry policies.
Subclasses MUST implement :meth:`retry` method.
"""
def __init__(self, **kwargs):
# type: (RetryPolicy) -> None
super(RetryPolicy, self).__init__()
def retry(self, envelope):
# type: (RetryPolicy, Envelope) -> None
"""This method is implemented by the subclass."""
raise NotImplementedError()
class BaseRetryPolicy(RetryPolicy):
"""Base retry policy class for :class:`.UnlimitedRetriesPolicy` and
:class:`.LimitedRetriesPolicy`.
It provides implementations for getting the message death count and for retry queue creation.
"""
def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):
# type: (BaseRetryPolicy, Consumer, str) -> None
"""
:param Consumer consumer: message consumer instance
:param str retry_queue_suffix: Suffix used when creating retry queues. Retry
queue names are constructed in the form "queue_name.<suffix>.<delay>".
Optional, defaults to ``retry``.
"""
super(BaseRetryPolicy, self).__init__(**kwargs)
retry_queue_suffix = retry_queue_suffix.strip()
self.consumer = consumer
assert len(retry_queue_suffix) > 0
self.retry_queue_suffix = retry_queue_suffix
# To avoid frequent retry queue create and destroy for low retry delays
self.min_retry_queue_ttl = 20 * 1000 # 20 seconds
def set_original_delivery_info_header(self, envelope):
# type: (BaseRetryPolicy, Envelope) -> None
"""Save original message delivery infomation in a header."""
if not envelope.get_header('x-original-delivery-info'):
original_delivery_info = {
'consumer_tag': envelope.delivery_info.consumer_tag,
'delivery_tag': envelope.delivery_info.delivery_tag,
'redelivered': envelope.delivery_info.redelivered,
'exchange': envelope.delivery_info.exchange,
'routing_key': envelope.delivery_info.routing_key
}
envelope.set_header('x-original-delivery-info',
original_delivery_info)
def get_death_count(self, envelope):
# type: (BaseRetryPolicy, Envelope) -> int
"""Return the death count of a message by examining "x-death" header.
:param Envelope envelope: Message envelope
:return int: death count
"""
death_header = envelope.get_header('x-death')
if death_header is None:
return 0
count = 0
for death in death_header:
if not death['queue'].startswith(self.consumer.queue_name):
continue
count += death.get('count', 1)
return count
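# Illustrative shape of an 'x-death' header as added by RabbitMQ (real entries also
# carry fields such as 'reason', 'exchange' and 'time', omitted here). For a consumer
# bound to queue "orders", only the first entry matches and the death count is 2:
#     [{'queue': 'orders.retry.5000', 'count': 2},
#      {'queue': 'some_other_queue', 'count': 1}]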
def declare_retry_queue(self, delay):
# type: (BaseRetryPolicy, int) -> str
"""Declare a retry queue for the provided delay.
Each delay has its own retry queue, where all retry messages with the same delay
are sent until they expire and get sent back to the original queue for handling
retry. The queue is declared with an expiry and automatically gets deleted. The
message TTL is equal to the provided delay. The retry queue's dead letter
exchange is the (default) direct exchange and the dead letter routing key is the
original queue name the messages originally came from. The messages are sent
back to the original queue when they reach their TTL, for handling retry.
The retry queue is redeclared before every new message is sent to it.
Redeclaration resets the queue's TTL, preventing it from being destroyed.
:param int delay: Retry delay in seconds
:return: retry queue name
:rtype: str
"""
delay_in_ms = int(delay * 1000)
retry_queue_name = '{}.{}.{}'.format(
self.consumer.queue_name, self.retry_queue_suffix, delay_in_ms)
# To avoid frequent queue create and destroy for low retry delays
queue_ttl = delay_in_ms * 2
if queue_ttl < self.min_retry_queue_ttl:
queue_ttl = self.min_retry_queue_ttl
self.consumer.channel.queue_declare(
callback=None,
queue=retry_queue_name,
durable=self.consumer.durable,
nowait=True,
arguments={
'x-dead-letter-exchange': '',
'x-dead-letter-routing-key': self.consumer.queue_name,
'x-message-ttl': delay_in_ms,
'x-expires': queue_ttl
})
logger.warning(
'Retry queue "{}" is created/redeclared'.format(retry_queue_name))
return retry_queue_name
class UnlimitedRetriesPolicy(BaseRetryPolicy):
"""Unlimited Retries Policy.
This is an implementation of :class:`.RetryPolicy` which performs incremental backoff
with unlimited retries.
:attr:`initial_delay`: the initial/first backoff delay in seconds
:attr:`delay_incremented_by`: the number of seconds the backoff is incremented by
after each death
:attr:`max_delay`: the final/maximum backoff delay in seconds that should not be
exceeded
"""
def __init__(self,
consumer,
initial_delay,
max_delay,
delay_incremented_by,
retry_queue_suffix='retry',
**kwargs):
# type: (UnlimitedRetriesPolicy, Consumer, int, int, int, str) -> None
"""
:param Consumer consumer: message consumer instance
:param int initial_delay: `initial_delay` is the initial/first backoff delay
in seconds.
:param int max_delay: `max_delay` is the final/maximum backoff delay in seconds
that should not be exceeded. When the computed delay exceeds it, this max is used.
:param int delay_incremented_by: `delay_incremented_by` is the number of seconds
the backoff should be incremented by after each death.
:param str retry_queue_suffix: suffix used when naming retry queues.
"""
super(UnlimitedRetriesPolicy,
self).__init__(consumer, retry_queue_suffix, **kwargs)
assert initial_delay >= 0
assert delay_incremented_by >= 0
assert max_delay >= initial_delay
self.initial_delay = initial_delay
self.max_delay = max_delay
self.delay_incremented_by = delay_incremented_by
def retry(self, envelope):
# type: (UnlimitedRetriesPolicy, Envelope) -> None
"""Send message to retry queue to retry handling it later.
Death count is calculated by examining 'x-death' header. Based on the death
count, the message is sent to a retry queue where it waits there till it
expires and gets sent back to the original queue for handling retry.
:param Envelope envelope: Message envelope
"""
death_count = self.get_death_count(envelope)
delay = self.initial_delay + (death_count * self.delay_incremented_by)
if delay > self.max_delay:
delay = self.max_delay
retry_queue_name = self.declare_retry_queue(delay)
# Save original delivery information
if envelope.get_header('x-original-delivery-info') is None:
self.set_original_delivery_info_header(envelope)
self.consumer.channel.basic_publish(
exchange='',
routing_key=retry_queue_name,
properties=envelope.properties,
body=envelope.payload)
self.consumer.channel.basic_ack(envelope.delivery_tag)
logger.warning(
'Retry handling message [{}] after {}s; death count: {}'.format(
envelope.message_id, delay, death_count + 1))
class LimitedRetriesPolicy(BaseRetryPolicy):
"""Limited Retries Policy.
This is an implementation of :class:`.RetryPolicy` which performs incremental backoff
with a limited number of retries.
:attr:`consumer`: message consumer instance
:attr:`retry_delays`: immutable list of retry backoff delays in seconds. The message
is sent to the dead letter exchange (dlx) when this list is exhausted, e.g. ``(1, 5, 10, 60, 5 * 60)``
:attr:`retry_queue_suffix`: suffix str used when naming retry queues.
"""
def __init__(self,
consumer,
retry_delays,
retry_queue_suffix='retry',
**kwargs):
# type: (LimitedRetriesPolicy, Consumer, Iterable[int], str) -> None
"""
:param Consumer consumer: message consumer instance
:param Iterable[int] retry_delays: Immutable list of retry backoff delays in
seconds. The message is sent to the dead letter exchange (dlx) when this list is
exhausted, e.g. ``(1, 5, 10, 60, 5 * 60)``
:param str retry_queue_suffix: suffix used when naming retry queues.
"""
assert len(retry_delays) > 0
super(LimitedRetriesPolicy, self).__init__(consumer, retry_queue_suffix,
**kwargs)
self.retry_delays = retry_delays
def retry(self, envelope):
# type: (LimitedRetriesPolicy, Envelope) -> None
"""Send message to retry queue to retry handling it later.
Death count is calculated by examining 'x-death' header. Based on the death
count, the message is sent to a retry queue where it waits there till it
expires and gets sent back to the original queue for handling retry.
The death count is used as an index for `retry_delays` list. Where each
item in the list represents a retry delay in seconds.
The message will be rejected if the death count exceeded the length of
`retry_delays` list.
:param Envelope envelope: Message envelope
"""
death_count = self.get_death_count(envelope)
if death_count < len(self.retry_delays):
delay = self.retry_delays[death_count]
retry_queue_name = self.declare_retry_queue(delay)
# Save original delivery information
if envelope.get_header('x-original-delivery-info') is None:
self.set_original_delivery_info_header(envelope)
self.consumer.channel.basic_publish(
exchange='',
routing_key=retry_queue_name,
properties=envelope.properties,
body=envelope.payload)
self.consumer.channel.basic_ack(envelope.delivery_tag)
logger.warning(
'Retry handling message [{}] after {}s; death count: {}'.format(
envelope.message_id, delay, death_count + 1))
else:
logger.warning(
'Message [{}] exceeded retry limit; death count: {}'.format(
envelope.message_id, death_count + 1))
self.consumer.channel.basic_reject(
envelope.delivery_tag, requeue=False)
logger.error('Message [{}] is rejected'.format(envelope.message_id))
class FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):
"""Fixed delay unlimited retries policy.
This is an implementation of :class:`.RetryPolicy` which uses a fixed backoff delay
with unlimited retries.
:attr:`consumer`: consumer instance
:attr:`delay`: retry delay in seconds
:attr:`retry_queue_suffix`: suffix str used when naming retry queues.
"""
def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):
# type: (FixedDelayUnlimitedRetriesPolicy, Consumer, int, str) -> None
"""
:param Consumer consumer: message consumer instance
:param int delay: retry delay in seconds
:param str retry_queue_suffix: suffix used when naming retry queues.
"""
super(FixedDelayUnlimitedRetriesPolicy, self).__init__(
consumer=consumer,
initial_delay=delay,
max_delay=delay,
delay_incremented_by=0,
retry_queue_suffix=retry_queue_suffix,
**kwargs)
class FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):
"""Fixed delay limited retries policy.
This is an implementation of :class:`.RetryPolicy` which uses a fixed backoff delay
with a limited number of retries.
:attr:`consumer`: consumer instance
:attr:`delay`: retry delay in seconds.
:attr:`retries_limit`: retries limit count.
:attr:`retry_queue_suffix`: suffix str used when naming retry queues.
"""
def __init__(self,
consumer,
delay,
retries_limit,
retry_queue_suffix='retry',
**kwargs):
# type: (FixedDelayLimitedRetriesPolicy, Consumer, int, int, str) -> None
"""
:param Consumer consumer: message consumer instance
:param int delay: retry delay in seconds
:param int retries_limit: retries limit count
:param str retry_queue_suffix: suffix used when naming retry queues.
"""
assert retries_limit > 0
retry_delays = tuple([delay] * retries_limit)
super(FixedDelayLimitedRetriesPolicy, self).__init__(
consumer=consumer,
retry_delays=retry_delays,
retry_queue_suffix=retry_queue_suffix,
**kwargs)
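# Usage sketch (illustrative; assumes an existing ``Consumer`` instance named
# ``consumer``). Because of the tuple expansion above, a fixed-delay limited policy
# behaves like a LimitedRetriesPolicy with the same delay repeated:
#     consumer.set_retry_policy(FixedDelayLimitedRetriesPolicy(
#         consumer=consumer, delay=30, retries_limit=5))
#     # equivalent to:
#     consumer.set_retry_policy(LimitedRetriesPolicy(
#         consumer=consumer, retry_delays=(30, 30, 30, 30, 30)))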
|
[
"# -*- coding: utf-8 -*-\n\"\"\"\nNoting is perfect, errors and timeouts may happen, and when such failures happen, the\nconsumer has to decide what to do with that. By default, the consumer would reject the\nenvelope (RabbitMQ message) when a failure happens. However, errors and timeouts\nissues, unless there is a software bug, usually solved with retries. Just like the\nrouting, the consumer doesn't make the retry decision itself, the consumer delegates\nit to a retry policy. Retry policy defines how the retry is performed. Retries\nusually happens with back-offs to avoid worsening the situation by hammering other\nservices with more requests, especially if it was a timeout issue. The consumer can be\nconfigured to use a retry policy by calling :meth:`.Consumer.set_retry_policy`, passing\nan instance of :class:`.RetryPolicy`. When a retry policy is set, the consumer won't\nreject messages, but rather, it send them to the retry policy to deal with the\nsituation by invoking :meth:`.RetryPolicy.retry` method. Based on it's implementation,\nThe retry policy decides how to do retries.\n\nThere are 4 different retry policies available:\n\n1. :class:`.UnlimitedRetriesPolicy`, Unlimited retries policy\n2. :class:`.LimitedRetriesPolicy`, Limited retries policy\n3. :class:`.FixedDelayUnlimitedRetriesPolicy`, Fixed delay unlimited retries policy\n4. :class:`.FixedDelayLimitedRetriesPolicy`, Fixed delay limited retries policy\n\nCustom retry policies can be created by implementing the base class\n:class:`.RetryPolicy`\n\"\"\"\nimport logging\n\nlogger = logging.getLogger(__name__)\n\n\nclass RetryPolicy(object):\n \"\"\"Base class for retry policies.\n\n Subclasses MUST implement :meth:`retry` method.\n \"\"\"\n\n def __init__(self, **kwargs):\n # type: (RetryPolicy) -> None\n super(RetryPolicy, self).__init__()\n\n def retry(self, envelope):\n # type: (RetryPolicy, Envelope) -> None\n \"\"\"This method is implemented by the subclass.\"\"\"\n raise NotImplementedError()\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n # type: (BaseRetryPolicy, Consumer, str) -> None\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. 
Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n # To avoid frequent retry queue create and destroy for low retry delays\n self.min_retry_queue_ttl = 20 * 1000 # 20 seconds\n\n def set_original_delivery_info_header(self, envelope):\n # type: (BaseRetryPolicy, Envelope) -> None\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {\n 'consumer_tag': envelope.delivery_info.consumer_tag,\n 'delivery_tag': envelope.delivery_info.delivery_tag,\n 'redelivered': envelope.delivery_info.redelivered,\n 'exchange': envelope.delivery_info.exchange,\n 'routing_key': envelope.delivery_info.routing_key\n }\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n # type: (BaseRetryPolicy, Envelope) -> int\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n\n if death_header is None:\n return 0\n\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n # type: (BaseRetryPolicy, int) -> str\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(\n self.consumer.queue_name, self.retry_queue_suffix, delay_in_ms)\n\n # To avoid frequent queue create and destroy for low retry delays\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n\n self.consumer.channel.queue_declare(\n callback=None,\n queue=retry_queue_name,\n durable=self.consumer.durable,\n nowait=True,\n arguments={\n 'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms,\n 'x-expires': queue_ttl\n })\n logger.warning(\n 'Retry queue \"{}\" is created/redeclared'.format(retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self,\n consumer,\n initial_delay,\n max_delay,\n delay_incremented_by,\n retry_queue_suffix='retry',\n **kwargs):\n # type: (UnlimitedRetriesPolicy, Consumer, int, int, int, str) -> None\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy,\n self).__init__(consumer, retry_queue_suffix, **kwargs)\n\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n # type: (UnlimitedRetriesPolicy, Envelope) -> None\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + (death_count * self.delay_incremented_by)\n\n if delay > self.max_delay:\n delay = self.max_delay\n\n retry_queue_name = self.declare_retry_queue(delay)\n\n # Save original delivery information\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n\n self.consumer.channel.basic_publish(\n exchange='',\n routing_key=retry_queue_name,\n properties=envelope.properties,\n body=envelope.payload)\n\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.format(\n envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self,\n consumer,\n retry_delays,\n retry_queue_suffix='retry',\n **kwargs):\n # type: (LimitedRetriesPolicy, Consumer, Iterable[int], str) -> None\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer, retry_queue_suffix,\n **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n # type: (LimitedRetriesPolicy, Envelope) -> None\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n\n # Save original delivery information\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n\n self.consumer.channel.basic_publish(\n exchange='',\n routing_key=retry_queue_name,\n properties=envelope.properties,\n body=envelope.payload)\n\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.format(\n envelope.message_id, delay, death_count + 1))\n else:\n logger.warning(\n 'Message [{}] exceeded retry limit; death count: {}'.format(\n envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(\n envelope.delivery_tag, requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id))\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n # type: (FixedDelayUnlimitedRetriesPolicy, Consumer, int, str) -> None\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(\n consumer=consumer,\n initial_delay=delay,\n max_delay=delay,\n delay_incremented_by=0,\n retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self,\n consumer,\n delay,\n retries_limit,\n retry_queue_suffix='retry',\n **kwargs):\n # type: (FixedDelayLimitedRetriesPolicy, Consumer, int, int, str) -> None\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(\n consumer=consumer,\n retry_delays=retry_delays,\n retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n",
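The policies above only touch a handful of attributes on the consumer (queue_name, channel, durable) and on the message envelope (headers, properties, payload, delivery_tag). Below is a minimal sketch of how a policy might be wired into a consumer's failure path; the consumer/envelope objects and the process callback are assumptions for illustration, not part of this module:

# Hypothetical wiring: on processing failure, hand the message to the retry policy.
# Assumes UnlimitedRetriesPolicy (defined above) is in scope and that `consumer`
# exposes queue_name/channel/durable and `envelope` exposes payload/delivery_tag.
def handle_message(consumer, envelope, process):
    policy = UnlimitedRetriesPolicy(
        consumer=consumer,
        initial_delay=1,          # first retry after ~1 second
        max_delay=60,             # cap the backoff at one minute
        delay_incremented_by=5,   # add 5 seconds per previous death
    )
    try:
        process(envelope.payload)
        consumer.channel.basic_ack(envelope.delivery_tag)
    except Exception:
        # Redeclares "<queue_name>.retry.<delay_ms>", publishes the message there,
        # and acks the original delivery.
        policy.retry(envelope)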
"<docstring token>\nimport logging\nlogger = logging.getLogger(__name__)\n\n\nclass RetryPolicy(object):\n \"\"\"Base class for retry policies.\n\n Subclasses MUST implement :meth:`retry` method.\n \"\"\"\n\n def __init__(self, **kwargs):\n super(RetryPolicy, self).__init__()\n\n def retry(self, envelope):\n \"\"\"This method is implemented by the subclass.\"\"\"\n raise NotImplementedError()\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
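The backoff used by UnlimitedRetriesPolicy.retry is linear in the death count and capped at max_delay. A standalone sketch of that arithmetic (pure Python, no broker needed) shows how the schedule grows:

def unlimited_retries_delay(death_count, initial_delay, delay_incremented_by, max_delay):
    # Mirrors the delay computation in UnlimitedRetriesPolicy.retry.
    delay = initial_delay + death_count * delay_incremented_by
    return min(delay, max_delay)

# With initial_delay=1, delay_incremented_by=5, max_delay=60 the schedule is
# 1, 6, 11, 16, ... and saturates at 60.
assert [unlimited_retries_delay(n, 1, 5, 60) for n in range(4)] == [1, 6, 11, 16]
assert unlimited_retries_delay(100, 1, 5, 60) == 60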
"<docstring token>\n<import token>\nlogger = logging.getLogger(__name__)\n\n\nclass RetryPolicy(object):\n \"\"\"Base class for retry policies.\n\n Subclasses MUST implement :meth:`retry` method.\n \"\"\"\n\n def __init__(self, **kwargs):\n super(RetryPolicy, self).__init__()\n\n def retry(self, envelope):\n \"\"\"This method is implemented by the subclass.\"\"\"\n raise NotImplementedError()\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
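get_death_count only counts x-death entries whose queue name starts with the consumer's queue name, so deaths that happened in unrelated queues are ignored. A self-contained sketch of that filtering follows; the sample header is illustrative and only mimics the general shape of RabbitMQ's x-death entries:

def death_count_for(queue_name, x_death_header):
    # Same counting rule as BaseRetryPolicy.get_death_count, without the consumer object.
    if x_death_header is None:
        return 0
    count = 0
    for death in x_death_header:
        if not death['queue'].startswith(queue_name):
            continue
        count += death.get('count', 1)
    return count

# Two deaths in this queue's retry queues plus one in an unrelated queue.
sample_header = [
    {'queue': 'orders.retry.1000', 'count': 2},
    {'queue': 'orders.retry.6000', 'count': 1},
    {'queue': 'other-queue.retry.1000', 'count': 5},
]
assert death_count_for('orders', sample_header) == 3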
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass RetryPolicy(object):\n \"\"\"Base class for retry policies.\n\n Subclasses MUST implement :meth:`retry` method.\n \"\"\"\n\n def __init__(self, **kwargs):\n super(RetryPolicy, self).__init__()\n\n def retry(self, envelope):\n \"\"\"This method is implemented by the subclass.\"\"\"\n raise NotImplementedError()\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
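declare_retry_queue derives both the retry queue name and its x-arguments from the delay: messages expire after the delay and are dead-lettered back to the original queue via the default exchange, while the retry queue itself expires after twice the delay (but never sooner than the 20-second minimum). A sketch of just that derivation, assuming the same 20-second floor used above:

def retry_queue_args(queue_name, delay, suffix='retry', min_queue_ttl_ms=20 * 1000):
    # Returns (name, arguments) analogous to what declare_retry_queue passes to queue_declare.
    delay_ms = int(delay * 1000)
    name = '{}.{}.{}'.format(queue_name, suffix, delay_ms)
    queue_ttl = max(delay_ms * 2, min_queue_ttl_ms)  # avoid queue churn for short delays
    arguments = {
        'x-dead-letter-exchange': '',              # default (direct) exchange
        'x-dead-letter-routing-key': queue_name,   # expire back to the original queue
        'x-message-ttl': delay_ms,
        'x-expires': queue_ttl,
    }
    return name, arguments

name, args = retry_queue_args('orders', 5)
assert name == 'orders.retry.5000'
assert args['x-message-ttl'] == 5000 and args['x-expires'] == 20000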
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass RetryPolicy(object):\n <docstring token>\n\n def __init__(self, **kwargs):\n super(RetryPolicy, self).__init__()\n\n def retry(self, envelope):\n \"\"\"This method is implemented by the subclass.\"\"\"\n raise NotImplementedError()\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
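LimitedRetriesPolicy uses the death count as an index into retry_delays and rejects the message once that list is exhausted. The selection logic in isolation, using the example delay list from the docstring:

def limited_retry_delay(death_count, retry_delays=(1, 5, 10, 60, 5 * 60)):
    # None means the retry budget is spent and the caller rejects the message
    # (it then goes to the queue's DLX, if one is configured).
    if death_count < len(retry_delays):
        return retry_delays[death_count]
    return None

assert limited_retry_delay(0) == 1
assert limited_retry_delay(4) == 300
assert limited_retry_delay(5) is None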
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass RetryPolicy(object):\n <docstring token>\n\n def __init__(self, **kwargs):\n super(RetryPolicy, self).__init__()\n <function token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
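The fixed-delay variants are thin wrappers over the two policies above: FixedDelayUnlimitedRetriesPolicy pins initial_delay and max_delay to the same value with a zero increment, and FixedDelayLimitedRetriesPolicy expands a single delay into a uniform retry_delays tuple, e.g.:

# How FixedDelayLimitedRetriesPolicy builds the delay list it hands to LimitedRetriesPolicy.
delay, retries_limit = 10, 3
retry_delays = tuple([delay] * retries_limit)
assert retry_delays == (10, 10, 10)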
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass RetryPolicy(object):\n <docstring token>\n <function token>\n <function token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
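UnlimitedRetriesPolicy derives the next backoff from the death count as initial_delay + death_count * delay_incremented_by, capped at max_delay. A small pure-function sketch of that arithmetic; the function name is illustrative:

# Sketch of the linear-backoff arithmetic in UnlimitedRetriesPolicy.retry.
def next_delay(death_count, initial_delay, delay_incremented_by, max_delay):
    # Grow linearly with each death, but never exceed the configured maximum.
    return min(initial_delay + death_count * delay_incremented_by, max_delay)

# With initial_delay=1, increment=5 and max_delay=60 the schedule is
# 1, 6, 11, 16, ... and saturates at 60.
assert [next_delay(n, 1, 5, 60) for n in range(4)] == [1, 6, 11, 16]
assert next_delay(100, 1, 5, 60) == 60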
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n \"\"\"Base retry policy class for :class:`.UnlimitedRetriesPolicy` and\n :class:`.LimitedRetriesPolicy`.\n\n It has implementation for geting mesage death count and retry queue creation.\n \"\"\"\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
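declare_retry_queue creates one queue per distinct delay, gives it a per-message TTL equal to the delay, dead-letters expired messages through the default exchange back to the original queue, and lets the retry queue itself expire after twice the delay (with a 20-second floor). The consumer channel used above has a pika-style API; the sketch below assumes pika's BlockingConnection, and the connection setup, queue name and delay are illustrative, whereas the original code declares on the consumer's own channel with nowait=True:

# Sketch: declare a per-delay retry queue that dead-letters back to the
# original queue, mirroring BaseRetryPolicy.declare_retry_queue.
import pika

def declare_retry_queue(channel, queue_name, delay_seconds,
                        suffix='retry', min_queue_ttl_ms=20 * 1000, durable=True):
    delay_ms = int(delay_seconds * 1000)
    retry_queue = '{}.{}.{}'.format(queue_name, suffix, delay_ms)
    # Keep the retry queue alive at least twice the message TTL (20s minimum),
    # so it survives between redeclarations.
    queue_ttl = max(delay_ms * 2, min_queue_ttl_ms)
    channel.queue_declare(
        queue=retry_queue,
        durable=durable,
        arguments={
            'x-dead-letter-exchange': '',             # default (direct) exchange
            'x-dead-letter-routing-key': queue_name,  # route back to the original queue
            'x-message-ttl': delay_ms,                # how long a message waits here
            'x-expires': queue_ttl,                   # queue auto-deletes when unused
        })
    return retry_queue

connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()
channel.queue_declare(queue='orders', durable=True)
print(declare_retry_queue(channel, 'orders', delay_seconds=5))  # orders.retry.5000
connection.close()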
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n <docstring token>\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n\n def get_death_count(self, envelope):\n \"\"\"Return the death count of a message by examining \"x-death\" header.\n\n :param Envelope envelope: Message envelope\n\n :return int: death count\n \"\"\"\n death_header = envelope.get_header('x-death')\n if death_header is None:\n return 0\n count = 0\n for death in death_header:\n if not death['queue'].startswith(self.consumer.queue_name):\n continue\n count += death.get('count', 1)\n return count\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
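LimitedRetriesPolicy treats the death count as an index into retry_delays and rejects (dead-letters) the message once the schedule is exhausted. A compact sketch of that decision; names and the sample schedule are illustrative:

# Sketch of the limited-retries decision: the next delay, or None when the
# message should be rejected instead of retried.
def pick_retry_delay(death_count, retry_delays):
    if death_count < len(retry_delays):
        return retry_delays[death_count]
    return None

delays = (1, 5, 10, 60, 5 * 60)
assert pick_retry_delay(0, delays) == 1     # first failure: retry after 1s
assert pick_retry_delay(4, delays) == 300   # fifth failure: retry after 5 minutes
assert pick_retry_delay(5, delays) is None  # schedule exhausted: reject / dead-letter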
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n <docstring token>\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n\n def set_original_delivery_info_header(self, envelope):\n \"\"\"Save original message delivery infomation in a header.\"\"\"\n if not envelope.get_header('x-original-delivery-info'):\n original_delivery_info = {'consumer_tag': envelope.\n delivery_info.consumer_tag, 'delivery_tag': envelope.\n delivery_info.delivery_tag, 'redelivered': envelope.\n delivery_info.redelivered, 'exchange': envelope.\n delivery_info.exchange, 'routing_key': envelope.\n delivery_info.routing_key}\n envelope.set_header('x-original-delivery-info',\n original_delivery_info)\n <function token>\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. 
The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
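Before the first republish to a retry queue, the policies stash the message's original delivery information in an 'x-original-delivery-info' header, since republishing through the default exchange replaces the delivery metadata. A sketch of building that header from pika-style delivery info; the namedtuple stands in for a real Basic.Deliver frame and the sample values are illustrative:

# Sketch: capture the original delivery information once, before the first
# republish to a retry queue, mirroring set_original_delivery_info_header.
from collections import namedtuple

DeliveryInfo = namedtuple(
    'DeliveryInfo',
    ['consumer_tag', 'delivery_tag', 'redelivered', 'exchange', 'routing_key'])

def original_delivery_header(headers, delivery_info):
    # Return headers with 'x-original-delivery-info' set exactly once.
    headers = dict(headers or {})
    if 'x-original-delivery-info' not in headers:
        headers['x-original-delivery-info'] = {
            'consumer_tag': delivery_info.consumer_tag,
            'delivery_tag': delivery_info.delivery_tag,
            'redelivered': delivery_info.redelivered,
            'exchange': delivery_info.exchange,
            'routing_key': delivery_info.routing_key,
        }
    return headers

info = DeliveryInfo('ctag-1', 42, False, 'events', 'orders.created')
hdrs = original_delivery_header({}, info)
assert hdrs['x-original-delivery-info']['routing_key'] == 'orders.created'
# A second call leaves the already-captured header untouched.
assert original_delivery_header(hdrs, info) == hdrs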
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n <docstring token>\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n <function token>\n <function token>\n\n def declare_retry_queue(self, delay):\n \"\"\"Declare a retry queue for the provided delay.\n\n Each different delay has a different queue where all retry messages with the\n same delay will be sent to till they expire and get sent back to the original\n queue for handling retry. The queue is declared with a TTL and automatically\n gets deleted. The queue TTL is equal to the provided delay. The retry\n queue's dead letter exchange is (default) direct exchange and the dead letter\n routing key is the original queue name where the messages originally\n came from. The messages will be sent back to the original queue when they\n reach their TTL, for handling retry.\n\n The retry queue is redeclared before every a new message is sent to it.\n Redeclaration resets the queue's TTL, preventing it from being destroyed.\n\n\n :param int delay: Retry delay in seconds\n\n :return: retry queue name\n :rtype: str\n \"\"\"\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(self.consumer.queue_name, self\n .retry_queue_suffix, delay_in_ms)\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n self.consumer.channel.queue_declare(callback=None, queue=\n retry_queue_name, durable=self.consumer.durable, nowait=True,\n arguments={'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms, 'x-expires': queue_ttl})\n logger.warning('Retry queue \"{}\" is created/redeclared'.format(\n retry_queue_name))\n return retry_queue_name\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. 
When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
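The two fixed-delay policies are thin wrappers: FixedDelayLimitedRetriesPolicy(delay, retries_limit) passes a constant retry_delays tuple to LimitedRetriesPolicy, and FixedDelayUnlimitedRetriesPolicy(delay) passes initial_delay == max_delay == delay with a zero increment to UnlimitedRetriesPolicy. A tiny sketch of the schedules these reductions produce; the helper names are illustrative:

# Sketch: the schedules the fixed-delay wrappers reduce to.
def fixed_limited_schedule(delay, retries_limit):
    # What FixedDelayLimitedRetriesPolicy passes as retry_delays.
    return tuple([delay] * retries_limit)

def fixed_unlimited_delay(delay, death_count):
    # UnlimitedRetriesPolicy with initial_delay == max_delay == delay and a
    # zero increment yields the same delay, whatever the death count.
    return min(delay + death_count * 0, delay)

assert fixed_limited_schedule(5, 3) == (5, 5, 5)
assert all(fixed_unlimited_delay(5, n) == 5 for n in range(10))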
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n <docstring token>\n\n def __init__(self, consumer, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param str retry_queue_suffix: Suffix used when creating retry queues. Retry\n queue names are constructed in this form \"queue_name.<suffix>.<delay>\".\n Optional, default to ``retry``\n \"\"\"\n super(BaseRetryPolicy, self).__init__(**kwargs)\n retry_queue_suffix = retry_queue_suffix.strip()\n self.consumer = consumer\n assert len(retry_queue_suffix) > 0\n self.retry_queue_suffix = retry_queue_suffix\n self.min_retry_queue_ttl = 20 * 1000\n <function token>\n <function token>\n <function token>\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
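Putting the death-count and schedule pieces together, the decision LimitedRetriesPolicy makes for one delivery is either "retry after N seconds" or "reject". A self-contained sketch of that end-to-end decision; names and sample data are illustrative, and the real policy additionally republishes to the retry queue and acks the original delivery:

# Sketch: the end-to-end decision for one delivery under a limited schedule.
def retry_decision(x_death, queue_name, retry_delays):
    count = 0
    for death in (x_death or []):
        if death['queue'].startswith(queue_name):
            count += death.get('count', 1)
    if count < len(retry_delays):
        return 'retry', retry_delays[count]
    return 'reject', None

assert retry_decision(None, 'orders', (1, 5, 10)) == ('retry', 1)
assert retry_decision([{'queue': 'orders.retry.1000', 'count': 3}], 'orders',
                      (1, 5, 10)) == ('reject', None)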
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass BaseRetryPolicy(RetryPolicy):\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. 
Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n 
super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Unlimited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n unlimited retries.\n\n :attr:`initial_delay`: is the initial/first backoff delay in seconds\n\n :attr:`delay_incremented_by`: is number of seconds the backoff should be incremented\n by after each death\n\n :attr:`max_delay`: is the final/maximum backoff delay in seconds that should net be\n exceeded\n \"\"\"\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. 
Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n 
super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n <docstring token>\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n delay = self.initial_delay + death_count * self.delay_incremented_by\n if delay > self.max_delay:\n delay = self.max_delay\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=envelope\n .payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning('Retry handling message [{}] after {}s; death count: {}'\n .format(envelope.message_id, delay, death_count + 1))\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. 
Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n <docstring token>\n\n def __init__(self, consumer, initial_delay, max_delay,\n delay_incremented_by, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int initial_delay: `initial_delay` is the initial/first backoff delay\n in seconds.\n\n :param int max_delay: `max_delay` is the final/maximum backoff delay in seconds\n that should net be exceeded. When exceeded, this max is used.\n\n :param int delay_incremented_by: `delay_incremented_by` is number of seconds\n the backoff should be incremented by after each death.\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(UnlimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n assert initial_delay >= 0\n assert delay_incremented_by >= 0\n assert max_delay >= initial_delay\n self.initial_delay = initial_delay\n self.max_delay = max_delay\n self.delay_incremented_by = delay_incremented_by\n <function token>\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. 
Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass UnlimitedRetriesPolicy(BaseRetryPolicy):\n <docstring token>\n <function token>\n <function token>\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n 
delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n \"\"\"Limited Retries Policy.\n\n This is an implementation of :class:`.RetryPolicy` which does incremental backoff,\n limited number of retries.\n\n :attr:`consumer`: message consumer instance\n\n :attr:`retry_delays`: immutable list of retry backoff delays in seconds. Message\n is sent to dlx when this list is exhausted. e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass 
FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n <docstring token>\n\n def __init__(self, consumer, retry_delays, retry_queue_suffix='retry',\n **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param Iterable[int] retry_delays: Immutable list of retry backoff delays in\n seconds. Message is sent to dlx when this list is exhausted.\n e.g ``(1, 5, 10, 60, 5 * 60)``\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert len(retry_delays) > 0\n super(LimitedRetriesPolicy, self).__init__(consumer,\n retry_queue_suffix, **kwargs)\n self.retry_delays = retry_delays\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def 
__init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n <docstring token>\n <function token>\n\n def retry(self, envelope):\n \"\"\"Send message to retry queue to retry handling it later.\n\n Death count is calculated by examining 'x-death' header. Based on the death\n count, the message is sent to a retry queue where it waits there till it\n expires and gets sent back to the original queue for handling retry.\n\n The death count is used as an index for `retry_delays` list. Where each\n item in the list represents a retry delay in seconds.\n\n The message will be rejected if the death count exceeded the length of\n `retry_delays` list.\n\n :param Envelope envelope: Message envelope\n \"\"\"\n death_count = self.get_death_count(envelope)\n if death_count < len(self.retry_delays):\n delay = self.retry_delays[death_count]\n retry_queue_name = self.declare_retry_queue(delay)\n if envelope.get_header('x-original-delivery-info') is None:\n self.set_original_delivery_info_header(envelope)\n self.consumer.channel.basic_publish(exchange='', routing_key=\n retry_queue_name, properties=envelope.properties, body=\n envelope.payload)\n self.consumer.channel.basic_ack(envelope.delivery_tag)\n logger.warning(\n 'Retry handling message [{}] after {}s; death count: {}'.\n format(envelope.message_id, delay, death_count + 1))\n else:\n logger.warning('Message [{}] exceeded retry limit; death count: {}'\n .format(envelope.message_id, death_count + 1))\n self.consumer.channel.basic_reject(envelope.delivery_tag,\n requeue=False)\n logger.error('Message [{}] is rejected'.format(envelope.message_id)\n )\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, 
**kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n\n\nclass LimitedRetriesPolicy(BaseRetryPolicy):\n <docstring token>\n <function token>\n <function token>\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n \"\"\"Fixed delay unlimited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n unlimited retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n <docstring token>\n\n def __init__(self, consumer, delay, retry_queue_suffix='retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n super(FixedDelayUnlimitedRetriesPolicy, self).__init__(consumer=\n consumer, initial_delay=delay, max_delay=delay,\n delay_incremented_by=0, retry_queue_suffix=retry_queue_suffix,\n **kwargs)\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FixedDelayUnlimitedRetriesPolicy(UnlimitedRetriesPolicy):\n <docstring token>\n <function token>\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n \"\"\"Fixed delay limited retries policy.\n\n This is an implementation of :class:`.RetryPolicy` which does fix backoff delay,\n limited number of retries.\n\n :attr:`consumer`: consumer instance\n\n :attr:`delay`: retry delay in seconds.\n\n :attr:`retries_limit`: retries limit count.\n\n :attr:`retry_queue_suffix`: suffix str used when naming retry queues.\n \"\"\"\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n <docstring token>\n\n def __init__(self, consumer, delay, retries_limit, retry_queue_suffix=\n 'retry', **kwargs):\n \"\"\"\n :param Consumer consumer: message consumer instance\n\n :param int delay: retry delay in seconds\n\n :param int retries_limit: retries limit count\n\n :param: str retry_queue_suffix: suffix used when naming retry queues.\n \"\"\"\n assert retries_limit > 0\n retry_delays = tuple([delay] * retries_limit)\n super(FixedDelayLimitedRetriesPolicy, self).__init__(consumer=\n consumer, retry_delays=retry_delays, retry_queue_suffix=\n retry_queue_suffix, **kwargs)\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FixedDelayLimitedRetriesPolicy(LimitedRetriesPolicy):\n <docstring token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
802 |
892eb8d1802b01c035993232cc80c710211ab102
|
# Processes are described by generator functions.
# During the lifetime of a process, the process function (a generator function)
# creates events and yields them.
# When a process yields an event, it gets suspended.
# SimPy resumes the process when the event is triggered.
# Multiple processes waiting on the same event are resumed in the same order
# in which they yielded it (see the shared-event sketch after the car example below).
import simpy
def car(env):
# i = 0
# while i<=10:
while True:
print("The car will start parking at: ",env.now)
parking_timeout = 5
yield env.timeout(parking_timeout)
print("The car will start driving at: ",env.now)
driving_timeout = 2
yield env.timeout(driving_timeout)
# if i == 10:
# print("the car is done moving")
# yield env.timeout(1)
# i += 1
env = simpy.Environment()
env.process(car(env))  # env.process() wraps the car generator in a SimPy process
# env.run()  # without an "until" limit this example would run forever
env.run(until=20)
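
# A minimal sketch (not part of the original example) illustrating the resume-order
# comment above: several processes yield the same simpy.Event and, once it is triggered,
# they are resumed in the order in which they yielded it. The names waiter, trigger,
# demo_env and shared are made up for this illustration.
def waiter(env, name, shared_event):
    print(name, "starts waiting at", env.now)
    yield shared_event                 # suspended until the event succeeds
    print(name, "resumed at", env.now)

def trigger(env, shared_event):
    yield env.timeout(3)
    shared_event.succeed()             # wakes the waiters in the order they yielded

demo_env = simpy.Environment()
shared = demo_env.event()
for name in ("first", "second", "third"):
    demo_env.process(waiter(demo_env, name, shared))
demo_env.process(trigger(demo_env, shared))
demo_env.run()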
|
[
"#processes are described by generator functions\n#during the lifetime of a process, the process function(generator function) \n#creates events and yields them\n\n#when a process yields an event, it gets suspended\n#Simpy resumes the process when the event is triggered\n#multiple processes waiting on the same event is resumed in the same order\n#it yielded the event\n\nimport simpy\n\ndef car(env):\n # i = 0\n # while i<=10:\n while True:\n print(\"The car will start parking at: \",env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n\n print(\"The car will start driving at: \",env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n # if i == 10:\n # print(\"the car is done moving\")\n # yield env.timeout(1)\n # i += 1\n\n\nenv = simpy.Environment()\nenv.process(car(env)) #the generator function creates the process called car\n#env.run()\nenv.run(until=20)\n\n\n ",
"import simpy\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\nenv = simpy.Environment()\nenv.process(car(env))\nenv.run(until=20)\n",
"<import token>\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\nenv = simpy.Environment()\nenv.process(car(env))\nenv.run(until=20)\n",
"<import token>\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\n<assignment token>\nenv.process(car(env))\nenv.run(until=20)\n",
"<import token>\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
803 |
1f69cf5f6d15048e6ead37b5da836c9e2f783f74
|
# The actual analysis begins here.
# This file loads everything downloaded by loaddata.py from disk, so the user avoids
# re-scraping and getting banned from IMDb.
# One goal is to see which key words critics and normal viewers use in their reviews
# and how they differ (a rough keyword sketch is appended at the end of this script).
# The second task is to assess the reviewers' sentiment vs. the actual score given.
# First, we need to load back everything we dumped to the folder via pickle.
import pickle
print('loading data...')
with open('movienumbers.pickle','rb') as input_file:
movienumbers = pickle.load(input_file)
with open('ratings.pickle','rb') as input_file:
ratings = pickle.load(input_file)
with open('userratings.pickle','rb') as input_file:
userratings = pickle.load(input_file)
with open('metaratings.pickle','rb') as input_file:
metaratings = pickle.load(input_file)
print('Pickled data successfully loaded.')
# Then it's time to use NLTK's VADER sentiment analyzer to score critics vs. viewers on movies.
from nltk.sentiment.vader import SentimentIntensityAnalyzer
# print(movienumbers)
# print(ratings)
# print(userratings)
# print(metaratings)
# userratings is a dictionary mapping movie ids to lists of reviews, e.g. "ttxxxxxx": [review1, review2, ...]
# print(userratings['tt0111161'])
#
# print(metaratings['tt0111161'])
# print(ratings['tt0111161'])
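# VADER's compound polarity score ranges from -1 (most negative) to +1 (most positive); we average it per movie.
# Note: a new SentimentIntensityAnalyzer is constructed for every single review below; building it once up front would be faster.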
userscore = {}
for movieid, reviews in userratings.items():
score = 0
for eachreviews in reviews:
score += SentimentIntensityAnalyzer().polarity_scores(eachreviews)['compound']
average = score / len(reviews)
userscore[movieid] = average
print(userscore)
# Meta ratings is a dictionary of the form "ttxxxxxx : [reviews1, reviews2,...]"
criticsscore = {}
for movieid, reviews in metaratings.items():
score_1 = 0
for eachreviews in reviews:
score_1 += SentimentIntensityAnalyzer().polarity_scores(eachreviews)['compound']
average = score_1 / len(reviews)
criticsscore[movieid] = average
print(criticsscore)
# Question 1: Are critics always more positive than the audience?
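# counter = number of movies for which the critics' average sentiment is higher than the users' average sentiment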
counter = 0
for movieid, score in userscore.items():
if movieid in criticsscore and criticsscore[movieid] > score:
counter += 1
else:
counter += 0
# Displaying results to question 1
print("Critics overpraise these movies " + str(counter) + " times more than normal viewers out of "
+ str(len(criticsscore)) + " movies in total.")
if counter < (len(criticsscore) - counter):
print("Because the critics overpraise less than half of the movies sampled here, the critics are more refrained "
"than the users on IMDb.")
else:
print("Because the critics overpraise no less than half of the movies sampled here, the critics are less refrained "
"than the users on IMDb.")
# Question 2: Is the IMDB score closer to the users' sentiment? Or the critics.
useriscloser = 0
criticiscloser = 0
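# ratings[movieid] is on IMDb's 0-10 scale, so it is divided by 10 before comparing against the averaged compound scores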
for movieid, score in criticsscore.items():
if abs(userscore[movieid] - (ratings[movieid])/10) > abs(score - (ratings[movieid]/10)):
useriscloser += 1
else:
criticiscloser += 1
# Displaying results to question 2
print("Critics are more closer to the ratings for " + str(criticiscloser) +
" times, while normal viewers are closer " + str(useriscloser) + " times out of " +
str(len(criticsscore)) + " movies in total.")
if useriscloser > criticiscloser:
print("Because the more movies have users resembling closer to the rating, the critics are less accurate "
"than the users on IMDb.")
else:
print("Because the more movies have critics resembling closer to the rating, the users are less accurate "
"than the users on IMDb.")
|
[
"# The actual code begins here\n# This file is intended to load everything downloaded from loaddata.py, preventing user getting banned from IMDB\n# The code is written to see what are some key words of the reviews from critics and normal viewers\n# And to see what are some of the differences\n# The second task is to asses the people's emotion vs. actual score given\n\n# First, we need to load back everything we dumped to folder via pickle.\n\nimport pickle\nprint('loading data...')\n\nwith open('movienumbers.pickle','rb') as input_file:\n movienumbers = pickle.load(input_file)\n\nwith open('ratings.pickle','rb') as input_file:\n ratings = pickle.load(input_file)\n\nwith open('userratings.pickle','rb') as input_file:\n userratings = pickle.load(input_file)\n\nwith open('metaratings.pickle','rb') as input_file:\n metaratings = pickle.load(input_file)\n\nprint('Pickled data successfully loaded.')\n\n# then, it's time to use nltp to see the score of the critics vs. viewers on movies\n\nfrom nltk.sentiment.vader import SentimentIntensityAnalyzer\n\n# print(movienumbers)\n# print(ratings)\n# print(userratings)\n# print(metaratings)\n\n# Userratings is a dictionary in ways like this \"ttxxxxxx : [reviews1, reviews2,...]\"\n\n# print(userratings['tt0111161'])\n#\n# print(metaratings['tt0111161'])\n# print(ratings['tt0111161'])\n\nuserscore = {}\nfor movieid, reviews in userratings.items():\n score = 0\n for eachreviews in reviews:\n score += SentimentIntensityAnalyzer().polarity_scores(eachreviews)['compound']\n average = score / len(reviews)\n userscore[movieid] = average\n\nprint(userscore)\n\n# Meta ratings is a dictionary in ways like this \"ttxxxxxx : [reviews1, reviews2,...]\"\n\n\n\ncriticsscore = {}\nfor movieid, reviews in metaratings.items():\n score_1 = 0\n for eachreviews in reviews:\n score_1 += SentimentIntensityAnalyzer().polarity_scores(eachreviews)['compound']\n average = score_1 / len(reviews)\n criticsscore[movieid] = average\n\nprint(criticsscore)\n\n\n# Question 1: Are critics always more positive than the audience?\n\ncounter = 0\nfor movieid, score in userscore.items():\n if movieid in criticsscore and criticsscore[movieid] > score:\n counter += 1\n else:\n counter += 0\n\n# Displaying results to question 1\nprint(\"Critics overpraise these movies \" + str(counter) + \" times more than normal viewers out of \"\n + str(len(criticsscore)) + \" movies in total.\")\nif counter < (len(criticsscore) - counter):\n print(\"Because the critics overpraise less than half of the movies sampled here, the critics are more refrained \"\n \"than the users on IMDb.\")\nelse:\n print(\"Because the critics overpraise no less than half of the movies sampled here, the critics are less refrained \"\n \"than the users on IMDb.\")\n\n# Question 2: Is the IMDB score closer to the users' sentiment? 
Or the critics.\n\nuseriscloser = 0\ncriticiscloser = 0\nfor movieid, score in criticsscore.items():\n if abs(userscore[movieid] - (ratings[movieid])/10) > abs(score - (ratings[movieid]/10)):\n useriscloser += 1\n else:\n criticiscloser += 1\n\n# Displaying results to question 2\nprint(\"Critics are more closer to the ratings for \" + str(criticiscloser) +\n \" times, while normal viewers are closer \" + str(useriscloser) + \" times out of \" +\n str(len(criticsscore)) + \" movies in total.\")\n\nif useriscloser > criticiscloser:\n print(\"Because the more movies have users resembling closer to the rating, the critics are less accurate \"\n \"than the users on IMDb.\")\nelse:\n print(\"Because the more movies have critics resembling closer to the rating, the users are less accurate \"\n \"than the users on IMDb.\")",
"import pickle\nprint('loading data...')\nwith open('movienumbers.pickle', 'rb') as input_file:\n movienumbers = pickle.load(input_file)\nwith open('ratings.pickle', 'rb') as input_file:\n ratings = pickle.load(input_file)\nwith open('userratings.pickle', 'rb') as input_file:\n userratings = pickle.load(input_file)\nwith open('metaratings.pickle', 'rb') as input_file:\n metaratings = pickle.load(input_file)\nprint('Pickled data successfully loaded.')\nfrom nltk.sentiment.vader import SentimentIntensityAnalyzer\nuserscore = {}\nfor movieid, reviews in userratings.items():\n score = 0\n for eachreviews in reviews:\n score += SentimentIntensityAnalyzer().polarity_scores(eachreviews)[\n 'compound']\n average = score / len(reviews)\n userscore[movieid] = average\nprint(userscore)\ncriticsscore = {}\nfor movieid, reviews in metaratings.items():\n score_1 = 0\n for eachreviews in reviews:\n score_1 += SentimentIntensityAnalyzer().polarity_scores(eachreviews)[\n 'compound']\n average = score_1 / len(reviews)\n criticsscore[movieid] = average\nprint(criticsscore)\ncounter = 0\nfor movieid, score in userscore.items():\n if movieid in criticsscore and criticsscore[movieid] > score:\n counter += 1\n else:\n counter += 0\nprint('Critics overpraise these movies ' + str(counter) +\n ' times more than normal viewers out of ' + str(len(criticsscore)) +\n ' movies in total.')\nif counter < len(criticsscore) - counter:\n print(\n 'Because the critics overpraise less than half of the movies sampled here, the critics are more refrained than the users on IMDb.'\n )\nelse:\n print(\n 'Because the critics overpraise no less than half of the movies sampled here, the critics are less refrained than the users on IMDb.'\n )\nuseriscloser = 0\ncriticiscloser = 0\nfor movieid, score in criticsscore.items():\n if abs(userscore[movieid] - ratings[movieid] / 10) > abs(score - \n ratings[movieid] / 10):\n useriscloser += 1\n else:\n criticiscloser += 1\nprint('Critics are more closer to the ratings for ' + str(criticiscloser) +\n ' times, while normal viewers are closer ' + str(useriscloser) +\n ' times out of ' + str(len(criticsscore)) + ' movies in total.')\nif useriscloser > criticiscloser:\n print(\n 'Because the more movies have users resembling closer to the rating, the critics are less accurate than the users on IMDb.'\n )\nelse:\n print(\n 'Because the more movies have critics resembling closer to the rating, the users are less accurate than the users on IMDb.'\n )\n",
"<import token>\nprint('loading data...')\nwith open('movienumbers.pickle', 'rb') as input_file:\n movienumbers = pickle.load(input_file)\nwith open('ratings.pickle', 'rb') as input_file:\n ratings = pickle.load(input_file)\nwith open('userratings.pickle', 'rb') as input_file:\n userratings = pickle.load(input_file)\nwith open('metaratings.pickle', 'rb') as input_file:\n metaratings = pickle.load(input_file)\nprint('Pickled data successfully loaded.')\n<import token>\nuserscore = {}\nfor movieid, reviews in userratings.items():\n score = 0\n for eachreviews in reviews:\n score += SentimentIntensityAnalyzer().polarity_scores(eachreviews)[\n 'compound']\n average = score / len(reviews)\n userscore[movieid] = average\nprint(userscore)\ncriticsscore = {}\nfor movieid, reviews in metaratings.items():\n score_1 = 0\n for eachreviews in reviews:\n score_1 += SentimentIntensityAnalyzer().polarity_scores(eachreviews)[\n 'compound']\n average = score_1 / len(reviews)\n criticsscore[movieid] = average\nprint(criticsscore)\ncounter = 0\nfor movieid, score in userscore.items():\n if movieid in criticsscore and criticsscore[movieid] > score:\n counter += 1\n else:\n counter += 0\nprint('Critics overpraise these movies ' + str(counter) +\n ' times more than normal viewers out of ' + str(len(criticsscore)) +\n ' movies in total.')\nif counter < len(criticsscore) - counter:\n print(\n 'Because the critics overpraise less than half of the movies sampled here, the critics are more refrained than the users on IMDb.'\n )\nelse:\n print(\n 'Because the critics overpraise no less than half of the movies sampled here, the critics are less refrained than the users on IMDb.'\n )\nuseriscloser = 0\ncriticiscloser = 0\nfor movieid, score in criticsscore.items():\n if abs(userscore[movieid] - ratings[movieid] / 10) > abs(score - \n ratings[movieid] / 10):\n useriscloser += 1\n else:\n criticiscloser += 1\nprint('Critics are more closer to the ratings for ' + str(criticiscloser) +\n ' times, while normal viewers are closer ' + str(useriscloser) +\n ' times out of ' + str(len(criticsscore)) + ' movies in total.')\nif useriscloser > criticiscloser:\n print(\n 'Because the more movies have users resembling closer to the rating, the critics are less accurate than the users on IMDb.'\n )\nelse:\n print(\n 'Because the more movies have critics resembling closer to the rating, the users are less accurate than the users on IMDb.'\n )\n",
"<import token>\nprint('loading data...')\nwith open('movienumbers.pickle', 'rb') as input_file:\n movienumbers = pickle.load(input_file)\nwith open('ratings.pickle', 'rb') as input_file:\n ratings = pickle.load(input_file)\nwith open('userratings.pickle', 'rb') as input_file:\n userratings = pickle.load(input_file)\nwith open('metaratings.pickle', 'rb') as input_file:\n metaratings = pickle.load(input_file)\nprint('Pickled data successfully loaded.')\n<import token>\n<assignment token>\nfor movieid, reviews in userratings.items():\n score = 0\n for eachreviews in reviews:\n score += SentimentIntensityAnalyzer().polarity_scores(eachreviews)[\n 'compound']\n average = score / len(reviews)\n userscore[movieid] = average\nprint(userscore)\n<assignment token>\nfor movieid, reviews in metaratings.items():\n score_1 = 0\n for eachreviews in reviews:\n score_1 += SentimentIntensityAnalyzer().polarity_scores(eachreviews)[\n 'compound']\n average = score_1 / len(reviews)\n criticsscore[movieid] = average\nprint(criticsscore)\n<assignment token>\nfor movieid, score in userscore.items():\n if movieid in criticsscore and criticsscore[movieid] > score:\n counter += 1\n else:\n counter += 0\nprint('Critics overpraise these movies ' + str(counter) +\n ' times more than normal viewers out of ' + str(len(criticsscore)) +\n ' movies in total.')\nif counter < len(criticsscore) - counter:\n print(\n 'Because the critics overpraise less than half of the movies sampled here, the critics are more refrained than the users on IMDb.'\n )\nelse:\n print(\n 'Because the critics overpraise no less than half of the movies sampled here, the critics are less refrained than the users on IMDb.'\n )\n<assignment token>\nfor movieid, score in criticsscore.items():\n if abs(userscore[movieid] - ratings[movieid] / 10) > abs(score - \n ratings[movieid] / 10):\n useriscloser += 1\n else:\n criticiscloser += 1\nprint('Critics are more closer to the ratings for ' + str(criticiscloser) +\n ' times, while normal viewers are closer ' + str(useriscloser) +\n ' times out of ' + str(len(criticsscore)) + ' movies in total.')\nif useriscloser > criticiscloser:\n print(\n 'Because the more movies have users resembling closer to the rating, the critics are less accurate than the users on IMDb.'\n )\nelse:\n print(\n 'Because the more movies have critics resembling closer to the rating, the users are less accurate than the users on IMDb.'\n )\n",
"<import token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
804 |
e31267871453d87aee409f1c751c36908f7f151a
|
"""
Package with a facade to the several expansion strategies.
"""
from acres.resolution import resolver
__all__ = ['resolver']
|
[
"\"\"\"\nPackage with a facade to the several expansion strategies.\n\"\"\"\nfrom acres.resolution import resolver\n\n__all__ = ['resolver']\n",
"<docstring token>\nfrom acres.resolution import resolver\n__all__ = ['resolver']\n",
"<docstring token>\n<import token>\n__all__ = ['resolver']\n",
"<docstring token>\n<import token>\n<assignment token>\n"
] | false |
805 |
be58862b66708c9de8cf7642c9de52ec744b079e
|
# $Header: //depot/cs/s/ajax_support.wsgi#10 $
from werkzeug.wrappers import Response
from p.DRequest import DRequest
from db.Support import SupportSession
from db.Exceptions import DbError, SupportSessionExpired
import db.Db as Db
import db.Support
import cgi
import simplejson as json
def application(environ, start_response):
"""AJAX scripts for email templates."""
request = DRequest(environ)
resp = None
try :
Db.start_transaction()
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
args = form['args'].value
req = json.loads(args)
support = SupportSession(key=request.support_key())
handler = handlers[req['command']]
resp = Response(json.dumps(handler(request, req)))
Db.finish_transaction()
except SupportSessionExpired:
Db.cancel_transaction()
resp = Response(json.dumps({ 'Error': 'Session Expired' }))
except DbError as e:
Db.cancel_transaction()
resp = Response(json.dumps({ 'Error': e.args[0]}))
except Exception as e:
Db.cancel_transaction()
import traceback
traceback.print_exc()
resp = Response(json.dumps({ 'Error': "Internal Error"}))
request.cookie_freshen(resp)
resp.headers['content-type'] = 'application/json'
resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'
return resp(environ, start_response)
def get(request, req):
return db.Support.get_all()
def edit(request, req):
return db.Support.edit(req);
def delete(request, req):
return db.Support.delete(req['support_id'])
def add(request, req):
return db.Support.new()
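# Dispatch table mapping the AJAX 'command' field to its handler; application() looks up req['command'] here.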
handlers = { 'get': get, 'edit': edit, 'delete': delete, 'add': add }
|
[
"# $Header: //depot/cs/s/ajax_support.wsgi#10 $\nfrom werkzeug.wrappers import Response\nfrom p.DRequest import DRequest\nfrom db.Support import SupportSession\nfrom db.Exceptions import DbError, SupportSessionExpired\nimport db.Db as Db\nimport db.Support\n\nimport cgi\nimport simplejson as json\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n\n request = DRequest(environ)\n\n resp = None\n\n try :\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({ 'Error': 'Session Expired' }))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({ 'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({ 'Error': \"Internal Error\"}))\n\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\ndef edit(request, req):\n return db.Support.edit(req);\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\ndef add(request, req):\n return db.Support.new()\n\n\nhandlers = { 'get': get, 'edit': edit, 'delete': delete, 'add': add }\n\n",
"from werkzeug.wrappers import Response\nfrom p.DRequest import DRequest\nfrom db.Support import SupportSession\nfrom db.Exceptions import DbError, SupportSessionExpired\nimport db.Db as Db\nimport db.Support\nimport cgi\nimport simplejson as json\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\ndef edit(request, req):\n return db.Support.edit(req)\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\nhandlers = {'get': get, 'edit': edit, 'delete': delete, 'add': add}\n",
"<import token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\ndef edit(request, req):\n return db.Support.edit(req)\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\nhandlers = {'get': get, 'edit': edit, 'delete': delete, 'add': add}\n",
"<import token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\ndef edit(request, req):\n return db.Support.edit(req)\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\n<assignment token>\n",
"<import token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\n<function token>\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\n<assignment token>\n",
"<import token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\n<function token>\n<function token>\n\n\ndef add(request, req):\n return db.Support.new()\n\n\n<assignment token>\n",
"<import token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef add(request, req):\n return db.Support.new()\n\n\n<assignment token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef add(request, req):\n return db.Support.new()\n\n\n<assignment token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n"
] | false |
806 |
328a03acab2a0550bea0795d22110a152db6c503
|
# %%
import os
print(os.getcwd())
# %%
from TransformerModel.Model import Model
from dataset.DatasetLoader import DatasetLoader
import pytorch_lightning as pl
from pytorch_lightning.callbacks import EarlyStopping
import argparse
from argparse import ArgumentParser, ArgumentTypeError
# %%
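# run_training wires the DatasetLoader to the Model and launches a PyTorch Lightning Trainer built from the parsed CLI args.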
def run_training(arguments_parser):
data = DatasetLoader(arguments_parser)
data.setup()
arguments_parser.num_training_steps = (
len(data.train_dataloader()) * arguments_parser.max_epochs
)
dict_args = vars(arguments_parser)
model = Model(**dict_args)
arguments_parser.early_stop_callback = EarlyStopping("val_loss")
trainer = pl.Trainer.from_argparse_args(arguments_parser)
trainer.fit(model, data)
# %%
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--pretrained", type=str, default="bert-base-uncased")
parser.add_argument("--nr_frozen_epochs", type=int, default=5)
parser.add_argument("--training_portion", type=float, default=0.9)
parser.add_argument("--batch_size", type=float, default=32)
parser.add_argument("--learning_rate", type=float, default=2e-5)
parser.add_argument("--frac", type=float, default=1)
parser = pl.Trainer.add_argparse_args(parser)
args = parser.parse_args()
run_training(args)
# %%
|
[
"# %%\nimport os\n\nprint(os.getcwd())\n# %%\nfrom TransformerModel.Model import Model\nfrom dataset.DatasetLoader import DatasetLoader\nimport pytorch_lightning as pl\nfrom pytorch_lightning.callbacks import EarlyStopping\nimport argparse\nfrom argparse import ArgumentParser, ArgumentTypeError\n\n# %%\n\n\ndef run_training(arguments_parser):\n data = DatasetLoader(arguments_parser)\n data.setup()\n\n arguments_parser.num_training_steps = (\n len(data.train_dataloader()) * arguments_parser.max_epochs\n )\n\n dict_args = vars(arguments_parser)\n\n model = Model(**dict_args)\n\n arguments_parser.early_stop_callback = EarlyStopping(\"val_loss\")\n\n trainer = pl.Trainer.from_argparse_args(arguments_parser)\n\n trainer.fit(model, data)\n\n\n# %%\nif __name__ == \"__main__\":\n\n parser = ArgumentParser()\n parser.add_argument(\"--pretrained\", type=str, default=\"bert-base-uncased\")\n parser.add_argument(\"--nr_frozen_epochs\", type=int, default=5)\n parser.add_argument(\"--training_portion\", type=float, default=0.9)\n parser.add_argument(\"--batch_size\", type=float, default=32)\n parser.add_argument(\"--learning_rate\", type=float, default=2e-5)\n parser.add_argument(\"--frac\", type=float, default=1)\n\n parser = pl.Trainer.add_argparse_args(parser)\n args = parser.parse_args()\n run_training(args)\n\n\n# %%\n",
"import os\nprint(os.getcwd())\nfrom TransformerModel.Model import Model\nfrom dataset.DatasetLoader import DatasetLoader\nimport pytorch_lightning as pl\nfrom pytorch_lightning.callbacks import EarlyStopping\nimport argparse\nfrom argparse import ArgumentParser, ArgumentTypeError\n\n\ndef run_training(arguments_parser):\n data = DatasetLoader(arguments_parser)\n data.setup()\n arguments_parser.num_training_steps = len(data.train_dataloader()\n ) * arguments_parser.max_epochs\n dict_args = vars(arguments_parser)\n model = Model(**dict_args)\n arguments_parser.early_stop_callback = EarlyStopping('val_loss')\n trainer = pl.Trainer.from_argparse_args(arguments_parser)\n trainer.fit(model, data)\n\n\nif __name__ == '__main__':\n parser = ArgumentParser()\n parser.add_argument('--pretrained', type=str, default='bert-base-uncased')\n parser.add_argument('--nr_frozen_epochs', type=int, default=5)\n parser.add_argument('--training_portion', type=float, default=0.9)\n parser.add_argument('--batch_size', type=float, default=32)\n parser.add_argument('--learning_rate', type=float, default=2e-05)\n parser.add_argument('--frac', type=float, default=1)\n parser = pl.Trainer.add_argparse_args(parser)\n args = parser.parse_args()\n run_training(args)\n",
"<import token>\nprint(os.getcwd())\n<import token>\n\n\ndef run_training(arguments_parser):\n data = DatasetLoader(arguments_parser)\n data.setup()\n arguments_parser.num_training_steps = len(data.train_dataloader()\n ) * arguments_parser.max_epochs\n dict_args = vars(arguments_parser)\n model = Model(**dict_args)\n arguments_parser.early_stop_callback = EarlyStopping('val_loss')\n trainer = pl.Trainer.from_argparse_args(arguments_parser)\n trainer.fit(model, data)\n\n\nif __name__ == '__main__':\n parser = ArgumentParser()\n parser.add_argument('--pretrained', type=str, default='bert-base-uncased')\n parser.add_argument('--nr_frozen_epochs', type=int, default=5)\n parser.add_argument('--training_portion', type=float, default=0.9)\n parser.add_argument('--batch_size', type=float, default=32)\n parser.add_argument('--learning_rate', type=float, default=2e-05)\n parser.add_argument('--frac', type=float, default=1)\n parser = pl.Trainer.add_argparse_args(parser)\n args = parser.parse_args()\n run_training(args)\n",
"<import token>\n<code token>\n<import token>\n\n\ndef run_training(arguments_parser):\n data = DatasetLoader(arguments_parser)\n data.setup()\n arguments_parser.num_training_steps = len(data.train_dataloader()\n ) * arguments_parser.max_epochs\n dict_args = vars(arguments_parser)\n model = Model(**dict_args)\n arguments_parser.early_stop_callback = EarlyStopping('val_loss')\n trainer = pl.Trainer.from_argparse_args(arguments_parser)\n trainer.fit(model, data)\n\n\n<code token>\n",
"<import token>\n<code token>\n<import token>\n<function token>\n<code token>\n"
] | false |
807 |
c712875273f988a3aa6dab61f79e99a077823060
|
#! /usr/bin/python
#
# convert the swig -debug-lsymbols output text file format into
# a simple list of lua module names and classes
#
# Dan Wilcox <[email protected]> 2017
#
import sys
import re
if len(sys.argv) < 3:
print("USAGE: lua_syntax.py MODULENAME INFILE")
exit(0)
module = sys.argv[1]
infile = sys.argv[2]
sections = []
sectionMatches = [
"string", # swig std::string wrappers
"string.SwigStatic" # swig std::string wrappers
]
sectionEnds = [
"Vector" # swig std::vector wrappers
]
lineMatches = [
"string", # swig std::string wrappers
"lua:cdata", # c pointers
]
lineStarts = [
"~", # destructors
"__", # lua metatable __add, __sub, etc
"of", # of core type prefixes
"ofx" # ofx addon type prefixes
]
lineEnds = [
"Vector" # swig std::vector wrappers
]
# any other user-supplied section names to ignore (added to the section match list so those sections are dropped)
for arg in sys.argv[3:]:
    sectionMatches.append(arg)
# check if a string matches one in an array
def matches(needle, haystack):
for straw in haystack:
if needle == straw:
return True
return False
# append a section to the sections array if the name passes muster
def appendSection(section):
# drop static classes which don't have any symbols
if len(section) < 2:
return
# section names are followed by a " -", so double check
if not section[0].endswith("-"):
print("warning: section name does not end with -: "+section[0])
return
# grab first non-whitespace name ie. "Color" from "Color -"
match = re.match("\S+", section[0])
if match:
if section[0] == "-": # main module is just a "-"
section[0] = module
else: # class name
section[0] = match.group(0)
else:
print("warning: section name had no non-whitespace match: "+section[0])
return
# drop sections which match certain strings
if matches(section[0], sectionMatches):
return
# drop sections which contain certain strings
if any(section[0].endswith(x) for x in sectionEnds):
return
# if got this far, the section must be good...
sections.append(section)
# parse swig output into sections
file = open(infile)
section = []
for line in file:
# strip whitespace
line = line.strip()
# ignore beginning and end lines
if line.startswith("LANGUAGE"):
continue
# section headers are a series of = chars, ie. ==========
if line.startswith("="):
appendSection(section)
section = []
# append line within a section
else:
# empty line
if len(line) == 0:
continue
# drop lines with certain prefixes
if any(line.startswith(x) for x in lineStarts):
continue
# drop lines with certain suffixes
if any(line.endswith(x) for x in lineEnds):
continue
# drop lines which match certain strings
if matches(line, lineMatches):
continue
# line must be good
section.append(line)
appendSection(section) # catch any left overs
file.close()
section = []
# for section in sections:
# print(section)
# exit(0)
# output module & section names to each section line
file = open(module+"_syntax.txt", "w")
num = 0
for section in sections:
# grab name from first line and output
prefix = " "
name = section[0]
if name == module: # main module
prefix = module+"."
file.write(module+"\n")
elif name.endswith(".SwigStatic"): # static members
name = name.split(".")[0] # drop SwigStatic suffix
prefix = module+"."+name+"."
else: # class instance members
file.write(module+"."+name+"\n")
# sort remaining lines
lines = section[1:]
lines.sort()
# output with module.class prefix
for line in lines:
if not line.endswith(".SwigStatic"): # drop statics from main module
file.write(prefix+line+"\n")
num = num + 1
# linebreak between sections
if num < len(sections):
file.write("\n")
file.close()
|
[
"#! /usr/bin/python\n#\n# convert the swig -debug-lsymbols output text file format into\n# a simple list of lua module names and classes\n#\n# Dan Wilcox <[email protected]> 2017\n#\nimport sys\nimport re\n\nif len(sys.argv) < 2:\n print(\"USAGE: lua_syntax.py MODULENAME INFILE\")\n exit(0)\n\nmodule = sys.argv[1]\ninfile = sys.argv[2]\nsections = []\nsectionMatches = [\n \"string\", # swig std::string wrappers\n \"string.SwigStatic\" # swig std::string wrappers\n]\nsectionEnds = [\n \"Vector\" # swig std::vector wrappers\n]\nlineMatches = [ \n \"string\", # swig std::string wrappers\n \"lua:cdata\", # c pointers\n]\nlineStarts = [\n \"~\", # destructors\n \"__\", # lua metatable __add, __sub, etc\n \"of\", # of core type prefixes\n \"ofx\" # ofx addon type prefixes\n]\nlineEnds = [\n \"Vector\" # swig std::vector wrappers\n]\n\n# any other user-supplied section ignores\nfor arg in sys.argv[3:]:\n sectionIgnores.append(arg)\n\n# check if a string matches one in an array\ndef matches(needle, haystack):\n for straw in haystack:\n if needle == straw:\n return True\n return False\n\n# append a section to the sections array if the name passes muster\ndef appendSection(section):\n # drop static classes which don't have any symbols\n if len(section) < 2:\n return\n # section names are followed by a \" -\", so double check\n if not section[0].endswith(\"-\"):\n print(\"warning: section name does not end with -: \"+section[0])\n return\n # grab first non-whitespace name ie. \"Color\" from \"Color -\"\n match = re.match(\"\\S+\", section[0])\n if match:\n if section[0] == \"-\": # main module is just a \"-\"\n section[0] = module\n else: # class name\n section[0] = match.group(0)\n else:\n print(\"warning: section name had no non-whitespace match: \"+section[0])\n return\n # drop sections which match certain strings\n if matches(section[0], sectionMatches):\n return\n # drop sections which contain certain strings\n if any(section[0].endswith(x) for x in sectionEnds):\n return\n # if got this far, the section must be good...\n sections.append(section)\n\n# parse swig output into sections\nfile = open(infile)\nsection = []\nfor line in file:\n # strip whitespace\n line = line.strip()\n # ignore beginning and end lines\n if line.startswith(\"LANGUAGE\"):\n continue\n # section headers are a series of = chars, ie. 
==========\n if line.startswith(\"=\"):\n appendSection(section)\n section = []\n # append line within a section\n else:\n # empty line\n if len(line) == 0:\n continue\n # drop lines with certain prefixes\n if any(line.startswith(x) for x in lineStarts):\n continue\n # drop lines with certain suffixes\n if any(line.endswith(x) for x in lineEnds):\n continue\n # drop lines which match certain strings\n if matches(line, lineMatches):\n continue\n # line must be good\n section.append(line)\nappendSection(section) # catch any left overs\nfile.close()\nsection = []\n\n# for section in sections:\n# print(section)\n# exit(0)\n\n# output module & section names to each section line\nfile = open(module+\"_syntax.txt\", \"w\")\nnum = 0\nfor section in sections:\n\n # grab name from first line and output\n prefix = \" \"\n name = section[0]\n if name == module: # main module\n prefix = module+\".\"\n file.write(module+\"\\n\")\n elif name.endswith(\".SwigStatic\"): # static members\n name = name.split(\".\")[0] # drop SwigStatic suffix\n prefix = module+\".\"+name+\".\"\n else: # class instance members\n file.write(module+\".\"+name+\"\\n\")\n\n # sort remaining lines\n lines = section[1:]\n lines.sort()\n\n # output with module.class prefix\n for line in lines:\n if not line.endswith(\".SwigStatic\"): # drop statics from main module\n file.write(prefix+line+\"\\n\")\n num = num + 1\n\n # linebreak between sections\n if num < len(sections):\n file.write(\"\\n\")\nfile.close()\n",
"import sys\nimport re\nif len(sys.argv) < 2:\n print('USAGE: lua_syntax.py MODULENAME INFILE')\n exit(0)\nmodule = sys.argv[1]\ninfile = sys.argv[2]\nsections = []\nsectionMatches = ['string', 'string.SwigStatic']\nsectionEnds = ['Vector']\nlineMatches = ['string', 'lua:cdata']\nlineStarts = ['~', '__', 'of', 'ofx']\nlineEnds = ['Vector']\nfor arg in sys.argv[3:]:\n sectionIgnores.append(arg)\n\n\ndef matches(needle, haystack):\n for straw in haystack:\n if needle == straw:\n return True\n return False\n\n\ndef appendSection(section):\n if len(section) < 2:\n return\n if not section[0].endswith('-'):\n print('warning: section name does not end with -: ' + section[0])\n return\n match = re.match('\\\\S+', section[0])\n if match:\n if section[0] == '-':\n section[0] = module\n else:\n section[0] = match.group(0)\n else:\n print('warning: section name had no non-whitespace match: ' +\n section[0])\n return\n if matches(section[0], sectionMatches):\n return\n if any(section[0].endswith(x) for x in sectionEnds):\n return\n sections.append(section)\n\n\nfile = open(infile)\nsection = []\nfor line in file:\n line = line.strip()\n if line.startswith('LANGUAGE'):\n continue\n if line.startswith('='):\n appendSection(section)\n section = []\n else:\n if len(line) == 0:\n continue\n if any(line.startswith(x) for x in lineStarts):\n continue\n if any(line.endswith(x) for x in lineEnds):\n continue\n if matches(line, lineMatches):\n continue\n section.append(line)\nappendSection(section)\nfile.close()\nsection = []\nfile = open(module + '_syntax.txt', 'w')\nnum = 0\nfor section in sections:\n prefix = ' '\n name = section[0]\n if name == module:\n prefix = module + '.'\n file.write(module + '\\n')\n elif name.endswith('.SwigStatic'):\n name = name.split('.')[0]\n prefix = module + '.' + name + '.'\n else:\n file.write(module + '.' + name + '\\n')\n lines = section[1:]\n lines.sort()\n for line in lines:\n if not line.endswith('.SwigStatic'):\n file.write(prefix + line + '\\n')\n num = num + 1\n if num < len(sections):\n file.write('\\n')\nfile.close()\n",
"<import token>\nif len(sys.argv) < 2:\n print('USAGE: lua_syntax.py MODULENAME INFILE')\n exit(0)\nmodule = sys.argv[1]\ninfile = sys.argv[2]\nsections = []\nsectionMatches = ['string', 'string.SwigStatic']\nsectionEnds = ['Vector']\nlineMatches = ['string', 'lua:cdata']\nlineStarts = ['~', '__', 'of', 'ofx']\nlineEnds = ['Vector']\nfor arg in sys.argv[3:]:\n sectionIgnores.append(arg)\n\n\ndef matches(needle, haystack):\n for straw in haystack:\n if needle == straw:\n return True\n return False\n\n\ndef appendSection(section):\n if len(section) < 2:\n return\n if not section[0].endswith('-'):\n print('warning: section name does not end with -: ' + section[0])\n return\n match = re.match('\\\\S+', section[0])\n if match:\n if section[0] == '-':\n section[0] = module\n else:\n section[0] = match.group(0)\n else:\n print('warning: section name had no non-whitespace match: ' +\n section[0])\n return\n if matches(section[0], sectionMatches):\n return\n if any(section[0].endswith(x) for x in sectionEnds):\n return\n sections.append(section)\n\n\nfile = open(infile)\nsection = []\nfor line in file:\n line = line.strip()\n if line.startswith('LANGUAGE'):\n continue\n if line.startswith('='):\n appendSection(section)\n section = []\n else:\n if len(line) == 0:\n continue\n if any(line.startswith(x) for x in lineStarts):\n continue\n if any(line.endswith(x) for x in lineEnds):\n continue\n if matches(line, lineMatches):\n continue\n section.append(line)\nappendSection(section)\nfile.close()\nsection = []\nfile = open(module + '_syntax.txt', 'w')\nnum = 0\nfor section in sections:\n prefix = ' '\n name = section[0]\n if name == module:\n prefix = module + '.'\n file.write(module + '\\n')\n elif name.endswith('.SwigStatic'):\n name = name.split('.')[0]\n prefix = module + '.' + name + '.'\n else:\n file.write(module + '.' + name + '\\n')\n lines = section[1:]\n lines.sort()\n for line in lines:\n if not line.endswith('.SwigStatic'):\n file.write(prefix + line + '\\n')\n num = num + 1\n if num < len(sections):\n file.write('\\n')\nfile.close()\n",
"<import token>\nif len(sys.argv) < 2:\n print('USAGE: lua_syntax.py MODULENAME INFILE')\n exit(0)\n<assignment token>\nfor arg in sys.argv[3:]:\n sectionIgnores.append(arg)\n\n\ndef matches(needle, haystack):\n for straw in haystack:\n if needle == straw:\n return True\n return False\n\n\ndef appendSection(section):\n if len(section) < 2:\n return\n if not section[0].endswith('-'):\n print('warning: section name does not end with -: ' + section[0])\n return\n match = re.match('\\\\S+', section[0])\n if match:\n if section[0] == '-':\n section[0] = module\n else:\n section[0] = match.group(0)\n else:\n print('warning: section name had no non-whitespace match: ' +\n section[0])\n return\n if matches(section[0], sectionMatches):\n return\n if any(section[0].endswith(x) for x in sectionEnds):\n return\n sections.append(section)\n\n\n<assignment token>\nfor line in file:\n line = line.strip()\n if line.startswith('LANGUAGE'):\n continue\n if line.startswith('='):\n appendSection(section)\n section = []\n else:\n if len(line) == 0:\n continue\n if any(line.startswith(x) for x in lineStarts):\n continue\n if any(line.endswith(x) for x in lineEnds):\n continue\n if matches(line, lineMatches):\n continue\n section.append(line)\nappendSection(section)\nfile.close()\n<assignment token>\nfor section in sections:\n prefix = ' '\n name = section[0]\n if name == module:\n prefix = module + '.'\n file.write(module + '\\n')\n elif name.endswith('.SwigStatic'):\n name = name.split('.')[0]\n prefix = module + '.' + name + '.'\n else:\n file.write(module + '.' + name + '\\n')\n lines = section[1:]\n lines.sort()\n for line in lines:\n if not line.endswith('.SwigStatic'):\n file.write(prefix + line + '\\n')\n num = num + 1\n if num < len(sections):\n file.write('\\n')\nfile.close()\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n\n\ndef matches(needle, haystack):\n for straw in haystack:\n if needle == straw:\n return True\n return False\n\n\ndef appendSection(section):\n if len(section) < 2:\n return\n if not section[0].endswith('-'):\n print('warning: section name does not end with -: ' + section[0])\n return\n match = re.match('\\\\S+', section[0])\n if match:\n if section[0] == '-':\n section[0] = module\n else:\n section[0] = match.group(0)\n else:\n print('warning: section name had no non-whitespace match: ' +\n section[0])\n return\n if matches(section[0], sectionMatches):\n return\n if any(section[0].endswith(x) for x in sectionEnds):\n return\n sections.append(section)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n\n\ndef appendSection(section):\n if len(section) < 2:\n return\n if not section[0].endswith('-'):\n print('warning: section name does not end with -: ' + section[0])\n return\n match = re.match('\\\\S+', section[0])\n if match:\n if section[0] == '-':\n section[0] = module\n else:\n section[0] = match.group(0)\n else:\n print('warning: section name had no non-whitespace match: ' +\n section[0])\n return\n if matches(section[0], sectionMatches):\n return\n if any(section[0].endswith(x) for x in sectionEnds):\n return\n sections.append(section)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
808 |
3d3b9956a98f11a170d66280abe7f193cef9ccfb
|
#%%
# -*- coding: utf-8 -*-
import numpy as np
import plotly
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import pandas as pd
import os
output_directory = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'
antennas = ['original_whip']
folder = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'
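# Filenames follow the r<angle1>i<angle2>.csv pattern: for each angle in 0-315 (45 degree steps) take (i, i) and (i+45, i), plus a final r360i360.csv.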
ri_filenames = []
for i_angle in np.arange(0, 360, 45):
ri_filenames.append('r%di%d.csv'%(i_angle, i_angle))
ri_filenames.append('r%di%d.csv'%(i_angle+45, i_angle))
ri_filenames.append('r360i360.csv')
angle_filenames = ['%d.csv'%(n) for n in np.arange(0, 405, 45)]
distance_filenames = ['%1.2f.csv'%(n) for n in np.arange(.75, 3.25, .25)]
ref_line = dict(color='white', width=1)
# Plot yaw data
for antenna in antennas:
fig = make_subplots(rows=2, cols=1,
subplot_titles=['Initiator RSSI vs. yaw',
'Calculated distance vs. yaw'],
shared_xaxes=True)
rssi_hist2d = []
dist_hist2d = []
experiment = 'orientation_exp1'
dist_lim = [100, 0]
db_lim = [-100, 0]
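    # For each angle file, build one normalized histogram column: RSSI bins are 1 dBm wide (index = -RSSI),
    # distance bins are 0.1 m wide, and each column holds the fraction of samples that fell into each bin.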
for filename in ri_filenames:
data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
Dist = np.around(data['distance'], 1)
for rssi in data['i_rssi']:
if rssi-5 < db_lim[1]:
db_lim[1] = rssi-5
if rssi+5 > db_lim[0]:
db_lim[0] = rssi+5
for dist in Dist:
if dist-.5 < dist_lim[0]:
dist_lim[0] = dist-.5
if dist+.5 > dist_lim[1]:
dist_lim[1] = dist+.5
dist_lim[0] = np.max([0, dist_lim[0]])
column = np.zeros(200)
hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
for row in hist:
row_idx = -int(row[0])
column[row_idx] = row[1]/len(data['i_rssi'])
rssi_hist2d.append(column)
column = np.zeros(100)
hist = np.array(np.unique(Dist, return_counts=True)).T
for row in hist:
row_idx = int(np.around(row[0]/.1))
column[row_idx] = row[1]/len(Dist)
dist_hist2d.append(column)
rssi_hist2d = np.array(rssi_hist2d).T
dist_hist2d = np.array(dist_hist2d).T
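    # Share one color scale (zmax) between the RSSI and distance heatmaps so the bin fractions are comparable.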
maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])
fig.add_trace(go.Heatmap(
x=np.arange(0, 765, 45),
y=np.arange(db_lim[0], db_lim[1], -1),
z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
zmin=0, zmax=maxz), row=1, col=1)
fig.add_trace(go.Heatmap(
x=np.arange(0, 765, 45),
y=np.arange(dist_lim[0], dist_lim[1], .1),
z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
zmin=0, zmax=maxz), row=2, col=1)
fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)
fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
fig.update_xaxes(title='Angle (°)', row=2, col=1)
fig.update_layout(showlegend=False)
fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
fig.write_image(os.path.join(output_directory, 'orientation_exp1_%s.png'%(antenna)))
# Plot pitch data
for antenna in antennas:
fig = make_subplots(rows=2, cols=1,
subplot_titles=['Initiator RSSI vs. pitch',
'Calculated distance vs. pitch'],
shared_xaxes=True)
rssi_hist2d = []
dist_hist2d = []
experiment = 'orientation_exp2'
dist_lim = [100, 0]
db_lim = [-100, 0]
for filename in ri_filenames:
data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
Dist = np.around(data['distance'], 1)
for rssi in data['i_rssi']:
if rssi-5 < db_lim[1]:
db_lim[1] = rssi-5
if rssi+5 > db_lim[0]:
db_lim[0] = rssi+5
for dist in Dist:
if dist-.5 < dist_lim[0]:
dist_lim[0] = dist-.5
if dist+.5 > dist_lim[1]:
dist_lim[1] = dist+.5
dist_lim[0] = np.max([0, dist_lim[0]])
column = np.zeros(200)
hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
for row in hist:
row_idx = -int(row[0])
column[row_idx] = row[1]/len(data['i_rssi'])
rssi_hist2d.append(column)
column = np.zeros(100)
hist = np.array(np.unique(Dist, return_counts=True)).T
for row in hist:
row_idx = int(np.around(row[0]/.1))
column[row_idx] = row[1]/len(Dist)
dist_hist2d.append(column)
rssi_hist2d = np.array(rssi_hist2d).T
dist_hist2d = np.array(dist_hist2d).T
maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])
fig.add_trace(go.Heatmap(
x=np.arange(0, 765, 45),
y=np.arange(db_lim[0], db_lim[1], -1),
z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
zmin=0, zmax=maxz), row=1, col=1)
fig.add_trace(go.Heatmap(
x=np.arange(0, 765, 45),
y=np.arange(dist_lim[0], dist_lim[1], .1),
z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
zmin=0, zmax=maxz), row=2, col=1)
fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)
fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
fig.update_xaxes(title='Angle (°)', row=2, col=1)
fig.update_layout(showlegend=False)
fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
fig.write_image(os.path.join(output_directory, 'orientation_exp2_%s.png'%(antenna)))
# Plot roll data
for antenna in antennas:
fig = make_subplots(rows=2, cols=1,
subplot_titles=['Initiator RSSI vs. roll',
'Calculated distance vs. roll'],
shared_xaxes=True)
rssi_hist2d = []
dist_hist2d = []
experiment = 'orientation_exp3'
dist_lim = [100, 0]
db_lim = [-100, 0]
for filename in ri_filenames:
data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
Dist = np.around(data['distance'], 1)
for rssi in data['i_rssi']:
if rssi-5 < db_lim[1]:
db_lim[1] = rssi-5
if rssi+5 > db_lim[0]:
db_lim[0] = rssi+5
for dist in Dist:
if dist-.5 < dist_lim[0]:
dist_lim[0] = dist-.5
if dist+.5 > dist_lim[1]:
dist_lim[1] = dist+.5
dist_lim[0] = np.max([0, dist_lim[0]])
column = np.zeros(200)
hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
for row in hist:
row_idx = -int(row[0])
column[row_idx] = row[1]/len(data['i_rssi'])
rssi_hist2d.append(column)
column = np.zeros(100)
hist = np.array(np.unique(Dist, return_counts=True)).T
for row in hist:
row_idx = int(np.around(row[0]/.1))
column[row_idx] = row[1]/len(Dist)
dist_hist2d.append(column)
rssi_hist2d = np.array(rssi_hist2d).T
dist_hist2d = np.array(dist_hist2d).T
maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])
fig.add_trace(go.Heatmap(
x=np.arange(0, 765, 45),
y=np.arange(db_lim[0], db_lim[1], -1),
z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
zmin=0, zmax=maxz), row=1, col=1)
fig.add_trace(go.Heatmap(
x=np.arange(0, 765, 45),
y=np.arange(dist_lim[0], dist_lim[1], .1),
z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
zmin=0, zmax=maxz), row=2, col=1)
fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)
fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
fig.update_xaxes(title='Angle (°)', row=2, col=1)
fig.update_layout(showlegend=False)
fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
fig.write_image(os.path.join(output_directory, 'orientation_exp3_%s.png'%(antenna)))
# Plot position data
for antenna in antennas:
fig = make_subplots(rows=2, cols=1,
subplot_titles=['Initiator RSSI vs. position',
'Calculated distance vs. position'],
shared_xaxes=True)
rssi_hist2d = []
dist_hist2d = []
experiment = 'orientation_exp4'
dist_lim = [100, 0]
db_lim = [-100, 0]
for filename in angle_filenames:
data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
Dist = np.around(data['distance'], 1)
for rssi in data['i_rssi']:
if rssi-5 < db_lim[1]:
db_lim[1] = rssi-5
if rssi+5 > db_lim[0]:
db_lim[0] = rssi+5
for dist in Dist:
if dist-.5 < dist_lim[0]:
dist_lim[0] = dist-.5
if dist+.5 > dist_lim[1]:
dist_lim[1] = dist+.5
dist_lim[0] = np.max([0, dist_lim[0]])
column = np.zeros(200)
hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
for row in hist:
row_idx = -int(row[0])
column[row_idx] = row[1]/len(data['i_rssi'])
rssi_hist2d.append(column)
column = np.zeros(100)
hist = np.array(np.unique(Dist, return_counts=True)).T
for row in hist:
row_idx = int(np.around(row[0]/.1))
column[row_idx] = row[1]/len(Dist)
dist_hist2d.append(column)
rssi_hist2d = np.array(rssi_hist2d).T
dist_hist2d = np.array(dist_hist2d).T
maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])
fig.add_trace(go.Heatmap(
x=np.arange(0, 360, 45),
y=np.arange(db_lim[0], db_lim[1], -1),
z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
zmin=0, zmax=maxz), row=1, col=1)
fig.add_trace(go.Heatmap(
x=np.arange(0, 360, 45),
y=np.arange(dist_lim[0], dist_lim[1], .1),
z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
zmin=0, zmax=maxz), row=2, col=1)
fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)
fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
fig.update_xaxes(title='Angle (°)', row=2, col=1)
fig.update_layout(showlegend=False)
fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
fig.write_image(os.path.join(output_directory, 'orientation_exp4_%s.png'%(antenna)))
# Plot separation data
for antenna in antennas:
fig = make_subplots(rows=2, cols=2,
subplot_titles=['Line of sight', 'Blocked'],
shared_xaxes=True)
rssi_los_hist2d = []
dist_los_hist2d = []
experiment = 'distance_los'
dist_lim = [100, 0]
db_lim = [-100, 0]
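    # Two passes over the same separations: line-of-sight files first, then blocked-path files; both share axis limits and the color scale.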
for filename in distance_filenames:
data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
Dist = np.around(data['distance'], 1)
for rssi in data['i_rssi']:
if rssi-5 < db_lim[1]:
db_lim[1] = rssi-5
if rssi+5 > db_lim[0]:
db_lim[0] = rssi+5
for dist in Dist:
if dist-.5 < dist_lim[0]:
dist_lim[0] = dist-.5
if dist+.5 > dist_lim[1]:
dist_lim[1] = dist+.5
dist_lim[0] = np.max([0, dist_lim[0]])
column = np.zeros(200)
hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
for row in hist:
row_idx = -int(row[0])
column[row_idx] = row[1]/len(data['i_rssi'])
rssi_los_hist2d.append(column)
column = np.zeros(100)
hist = np.array(np.unique(Dist, return_counts=True)).T
for row in hist:
row_idx = int(np.around(row[0]/.1))
column[row_idx] = row[1]/len(Dist)
dist_los_hist2d.append(column)
rssi_los_hist2d = np.array(rssi_los_hist2d).T
dist_los_hist2d = np.array(dist_los_hist2d).T
rssi_blocked_hist2d = []
dist_blocked_hist2d = []
experiment = 'distance_blocked'
for filename in distance_filenames:
data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
Dist = np.around(data['distance'], 1)
for rssi in data['i_rssi']:
if rssi-5 < db_lim[1]:
db_lim[1] = rssi-5
if rssi+5 > db_lim[0]:
db_lim[0] = rssi+5
for dist in Dist:
if dist-.5 < dist_lim[0]:
dist_lim[0] = dist-.5
if dist+.5 > dist_lim[1]:
dist_lim[1] = dist+.5
dist_lim[0] = np.max([0, dist_lim[0]])
column = np.zeros(200)
hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
for row in hist:
row_idx = -int(row[0])
column[row_idx] = row[1]/len(data['i_rssi'])
rssi_blocked_hist2d.append(column)
        column = np.zeros(1000)  # 0.1 m bins up to 100 m (the LOS histogram above uses 100 bins / 10 m)
hist = np.array(np.unique(Dist, return_counts=True)).T
for row in hist:
row_idx = int(np.around(row[0]/.1))
column[row_idx] = row[1]/len(Dist)
dist_blocked_hist2d.append(column)
rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T
dist_blocked_hist2d = np.array(dist_blocked_hist2d).T
    # Use the histograms computed in this loop (not the stale orientation-loop
    # arrays) for the shared color scale.
    maxz = np.max([np.max(rssi_los_hist2d), np.max(dist_los_hist2d),
                   np.max(rssi_blocked_hist2d), np.max(dist_blocked_hist2d)])
fig.add_trace(go.Heatmap(
x=np.arange(.75, 3.25, .25),
y=np.arange(db_lim[0], db_lim[1], -1),
z=rssi_los_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
zmin=0, zmax=maxz), row=1, col=1)
fig.add_trace(go.Heatmap(
x=np.arange(.75, 3.25, .25),
y=np.arange(dist_lim[0], dist_lim[1], .1),
z=dist_los_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
zmin=0, zmax=maxz), row=2, col=1)
fig.add_trace(go.Heatmap(
x=np.arange(.75, 3.25, .25),
y=np.arange(db_lim[0], db_lim[1], -1),
z=rssi_blocked_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
zmin=0, zmax=maxz), row=1, col=2)
fig.add_trace(go.Heatmap(
x=np.arange(.75, 3.25, .25),
y=np.arange(dist_lim[0], dist_lim[1], .1),
z=dist_blocked_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
zmin=0, zmax=maxz), row=2, col=2)
fig.add_trace(go.Scatter(x=np.arange(.75, 3.25, .25), y=np.arange(.75, 3.25, .25), mode='lines', line=ref_line), row=2, col=1)
fig.add_trace(go.Scatter(x=np.arange(.75, 3.25, .25), y=np.arange(.75, 3.25, .25), mode='lines', line=ref_line), row=2, col=2)
fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
fig.update_xaxes(title='Separation (m)', row=2, col=1)
fig.update_xaxes(title='Separation (m)', row=2, col=2)
fig.update_layout(showlegend=False)
fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
fig.write_image(os.path.join(output_directory, 'distance_%s.png'%(antenna)))
|
[
"#%%\n# -*- coding: utf-8 -*-\n\nimport numpy as np\nimport plotly\nfrom plotly.subplots import make_subplots\nimport plotly.graph_objects as go\nimport pandas as pd\nimport os\n\noutput_directory = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'\nantennas = ['original_whip']\nfolder = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'\nri_filenames = []\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv'%(i_angle, i_angle))\n ri_filenames.append('r%di%d.csv'%(i_angle+45, i_angle))\nri_filenames.append('r360i360.csv')\nangle_filenames = ['%d.csv'%(n) for n in np.arange(0, 405, 45)]\ndistance_filenames = ['%1.2f.csv'%(n) for n in np.arange(.75, 3.25, .25)]\n\nref_line = dict(color='white', width=1)\n\n# Plot yaw data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. yaw',\n 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp1_%s.png'%(antenna)))\n\n# Plot pitch data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. pitch',\n 'Calculated distance vs. 
pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp2_%s.png'%(antenna)))\n\n# Plot roll data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. roll',\n 'Calculated distance vs. 
roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp3_%s.png'%(antenna)))\n\n# Plot position data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. position',\n 'Calculated distance vs. 
position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 360, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 360, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp4_%s.png'%(antenna)))\n\n# Plot separation data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2,\n subplot_titles=['Line of sight', 'Blocked'],\n shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n \n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n experiment = 'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = 
np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_los_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_los_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_blocked_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_blocked_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(.75, 3.25, .25), y=np.arange(.75, 3.25, .25), mode='lines', line=ref_line), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(.75, 3.25, .25), y=np.arange(.75, 3.25, .25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png'%(antenna)))",
"import numpy as np\nimport plotly\nfrom plotly.subplots import make_subplots\nimport plotly.graph_objects as go\nimport pandas as pd\nimport os\noutput_directory = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'\nantennas = ['original_whip']\nfolder = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'\nri_filenames = []\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv' % (i_angle, i_angle))\n ri_filenames.append('r%di%d.csv' % (i_angle + 45, i_angle))\nri_filenames.append('r360i360.csv')\nangle_filenames = [('%d.csv' % n) for n in np.arange(0, 405, 45)]\ndistance_filenames = [('%1.2f.csv' % n) for n in np.arange(0.75, 3.25, 0.25)]\nref_line = dict(color='white', width=1)\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. yaw', 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp1_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. pitch', 'Calculated distance vs. 
pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp2_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. roll', 'Calculated distance vs. 
roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp3_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. position', 'Calculated distance vs. 
position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp4_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2, subplot_titles=['Line of sight',\n 'Blocked'], shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n experiment = 'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, 
experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_los_hist2d[int(-db_lim[0]):int(-\n db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_los_hist2d[int(dist_lim[0] /\n 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_blocked_hist2d[int(-db_lim[0]):\n int(-db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_blocked_hist2d[int(dist_lim[\n 0] / 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png' % antenna)\n )\n",
"<import token>\noutput_directory = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'\nantennas = ['original_whip']\nfolder = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'\nri_filenames = []\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv' % (i_angle, i_angle))\n ri_filenames.append('r%di%d.csv' % (i_angle + 45, i_angle))\nri_filenames.append('r360i360.csv')\nangle_filenames = [('%d.csv' % n) for n in np.arange(0, 405, 45)]\ndistance_filenames = [('%1.2f.csv' % n) for n in np.arange(0.75, 3.25, 0.25)]\nref_line = dict(color='white', width=1)\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. yaw', 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp1_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. pitch', 'Calculated distance vs. 
pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp2_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. roll', 'Calculated distance vs. 
roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp3_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. position', 'Calculated distance vs. 
position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp4_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2, subplot_titles=['Line of sight',\n 'Blocked'], shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n experiment = 'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, 
experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_los_hist2d[int(-db_lim[0]):int(-\n db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_los_hist2d[int(dist_lim[0] /\n 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_blocked_hist2d[int(-db_lim[0]):\n int(-db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_blocked_hist2d[int(dist_lim[\n 0] / 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png' % antenna)\n )\n",
"<import token>\n<assignment token>\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv' % (i_angle, i_angle))\n ri_filenames.append('r%di%d.csv' % (i_angle + 45, i_angle))\nri_filenames.append('r360i360.csv')\n<assignment token>\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. yaw', 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp1_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. pitch', 'Calculated distance vs. 
pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp2_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. roll', 'Calculated distance vs. 
roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp3_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. position', 'Calculated distance vs. 
position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp4_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2, subplot_titles=['Line of sight',\n 'Blocked'], shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n experiment = 'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, 
experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_los_hist2d[int(-db_lim[0]):int(-\n db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_los_hist2d[int(dist_lim[0] /\n 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_blocked_hist2d[int(-db_lim[0]):\n int(-db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_blocked_hist2d[int(dist_lim[\n 0] / 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png' % antenna)\n )\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
809 |
4cb601d7fc4023e145c6d510d27507214ddbd2d3
|
from django.shortcuts import render, redirect
from .models import *
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from datetime import date
# Create your views here.
def home(request):
if request.method=='GET':
daily_users = User.objects.filter(date_joined__contains=date.today()).count()
return render(request,'home/home.html',{'users':daily_users})
def register(request):
if request.method=='GET':
return render(request,'home/home.html')
else:
name = request.POST['name']
username = request.POST['uname']
email = request.POST['email']
password = request.POST['password']
if name and username and email and password:
if not User.objects.filter(username=username).exists():
user = User.objects.create_user(first_name=name,
username=username,
email=email,
password=password)
u = authenticate(username=username, password=password)
if u is not None:
print("authenticated")
login(request, u)
request.session['id'] = user.id
return redirect('user')
else:
            return redirect('/')
def login_view(request):
if request.method=='GET':
if 'id' in request.session:
return redirect('user')
return render(request,'home/login.html')
else:
username = request.POST['uname']
password = request.POST['password']
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
print("user active")
login(request, user)
request.session['id'] = User.objects.filter(username=username).values('id')[0]['id']
return redirect('user')
else:
return render(request, 'home/home.html')
else:
return redirect('/')
def user(request):
if request.method=='GET':
try:
uid = request.GET['id']
        except KeyError:
uid = request.session['id']
#print(uid)
user = User.objects.get(pk=int(uid))
#print(user.username)
genre = Genres.objects.all()
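        # fbook: the books this user already follows; their genres drive the recommendations below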
fbook = UserBook.objects.filter(user=user)
genre_list = []
for i in fbook:
if i.book.genre.id in genre_list:
pass
else:
genre_list.append(i.book.genre.id)
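        # recommend up to 5 top-rated books, split evenly across the user's followed genres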
if len(genre_list)!=0:
number = 5//len(genre_list)
isselected = 1
recbook = set()
for i in genre_list:
book = Books.objects.filter(genre=int(i)).order_by('-rating')
while len(recbook)<5:
if len(book)>=number:
for k in range(0,number):
recbook.add(book[k])
else:
for k in range(0,len(book)):
recbook.add(book[k])
break
else:
isselected = 0
recbook =""
return render(request,'home/user.html',{'user':user,'genre':genre,"fbook":fbook,'recbook':recbook,'isset':isselected})
else:
user = User.objects.get(pk=int(request.session['id']))
book = request.POST['book']
userbook = UserBook(
user = user,
book=Books.objects.get(pk=int(book))
)
userbook.save()
return redirect('user')
def genre(request):
if request.method=='GET':
id = request.GET['id']
books = Books.objects.filter(genre=id)
return render(request,'home/genre.html',{'books':books,})
def book(request):
if request.method=='GET':
id = request.GET['id']
book = Books.objects.get(pk=(int(id)))
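        # follow is 1 when the logged-in user already follows this book, 0 otherwise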
if UserBook.objects.filter(user=User.objects.get(pk=int(request.session['id'])),book=book).exists():
follow = 1
else:
follow = 0
comment = UserCommentBook.objects.filter(book=book)
return render(request, 'home/book.html',{'book':book,'comment':comment,'follow':follow})
else:
comment = request.POST['comment']
book = request.POST['id']
comment = UserCommentBook(
user = User.objects.get(pk=int(request.session['id'])),
book = Books.objects.get(pk=int(book)),
comment=comment,
)
comment.save()
return redirect('book/?id='+str(book))
def logout_view(request):
logout(request)
return redirect('/')
|
[
"from django.shortcuts import render, redirect\r\nfrom .models import *\r\nfrom django.contrib.auth import authenticate ,login,logout\r\nfrom django.contrib.auth.models import User\r\nfrom datetime import date\r\n# Create your views here.\r\n\r\n\r\ndef home(request):\r\n if request.method=='GET':\r\n daily_users = User.objects.filter(date_joined__contains=date.today()).count()\r\n return render(request,'home/home.html',{'users':daily_users})\r\n\r\n\r\n\r\n\r\n\r\ndef register(request):\r\n if request.method=='GET':\r\n return render(request,'home/home.html')\r\n else:\r\n name = request.POST['name']\r\n username = request.POST['uname']\r\n email = request.POST['email']\r\n password = request.POST['password']\r\n if name and username and email and password:\r\n if not User.objects.filter(username=username).exists():\r\n user = User.objects.create_user(first_name=name,\r\n username=username,\r\n email=email,\r\n password=password)\r\n u = authenticate(username=username, password=password)\r\n if u is not None:\r\n print(\"authenticated\")\r\n login(request, u)\r\n\r\n request.session['id'] = user.id\r\n return redirect('user')\r\n else:\r\n redirect('/')\r\n\r\ndef login_view(request):\r\n if request.method=='GET':\r\n if 'id' in request.session:\r\n return redirect('user')\r\n return render(request,'home/login.html')\r\n\r\n else:\r\n username = request.POST['uname']\r\n password = request.POST['password']\r\n user = authenticate(username=username, password=password)\r\n if user is not None:\r\n if user.is_active:\r\n print(\"user active\")\r\n login(request, user)\r\n request.session['id'] = User.objects.filter(username=username).values('id')[0]['id']\r\n return redirect('user')\r\n else:\r\n return render(request, 'home/home.html')\r\n else:\r\n return redirect('/')\r\n\r\ndef user(request):\r\n if request.method=='GET':\r\n try:\r\n uid = request.GET['id']\r\n except:\r\n uid = request.session['id']\r\n #print(uid)\r\n user = User.objects.get(pk=int(uid))\r\n #print(user.username)\r\n genre = Genres.objects.all()\r\n fbook = UserBook.objects.filter(user=user)\r\n genre_list = []\r\n for i in fbook:\r\n if i.book.genre.id in genre_list:\r\n pass\r\n else:\r\n genre_list.append(i.book.genre.id)\r\n if len(genre_list)!=0:\r\n number = 5//len(genre_list)\r\n isselected = 1\r\n recbook = set()\r\n for i in genre_list:\r\n book = Books.objects.filter(genre=int(i)).order_by('-rating')\r\n while len(recbook)<5:\r\n if len(book)>=number:\r\n for k in range(0,number):\r\n recbook.add(book[k])\r\n else:\r\n for k in range(0,len(book)):\r\n recbook.add(book[k])\r\n break\r\n else:\r\n isselected = 0\r\n recbook =\"\"\r\n return render(request,'home/user.html',{'user':user,'genre':genre,\"fbook\":fbook,'recbook':recbook,'isset':isselected})\r\n else:\r\n user = User.objects.get(pk=int(request.session['id']))\r\n book = request.POST['book']\r\n userbook = UserBook(\r\n user = user,\r\n book=Books.objects.get(pk=int(book))\r\n )\r\n userbook.save()\r\n return redirect('user')\r\n\r\ndef genre(request):\r\n if request.method=='GET':\r\n id = request.GET['id']\r\n books = Books.objects.filter(genre=id)\r\n return render(request,'home/genre.html',{'books':books,})\r\n\r\ndef book(request):\r\n if request.method=='GET':\r\n id = request.GET['id']\r\n book = Books.objects.get(pk=(int(id)))\r\n if UserBook.objects.filter(user=User.objects.get(pk=int(request.session['id'])),book=book).exists():\r\n follow = 1\r\n else:\r\n follow = 0\r\n comment = UserCommentBook.objects.filter(book=book)\r\n return 
render(request, 'home/book.html',{'book':book,'comment':comment,'follow':follow})\r\n else:\r\n comment = request.POST['comment']\r\n book = request.POST['id']\r\n comment = UserCommentBook(\r\n user = User.objects.get(pk=int(request.session['id'])),\r\n book = Books.objects.get(pk=int(book)),\r\n comment=comment,\r\n )\r\n comment.save()\r\n return redirect('book/?id='+str(book))\r\n\r\ndef logout_view(request):\r\n logout(request)\r\n return redirect('/')",
"from django.shortcuts import render, redirect\nfrom .models import *\nfrom django.contrib.auth import authenticate, login, logout\nfrom django.contrib.auth.models import User\nfrom datetime import date\n\n\ndef home(request):\n if request.method == 'GET':\n daily_users = User.objects.filter(date_joined__contains=date.today()\n ).count()\n return render(request, 'home/home.html', {'users': daily_users})\n\n\ndef register(request):\n if request.method == 'GET':\n return render(request, 'home/home.html')\n else:\n name = request.POST['name']\n username = request.POST['uname']\n email = request.POST['email']\n password = request.POST['password']\n if name and username and email and password:\n if not User.objects.filter(username=username).exists():\n user = User.objects.create_user(first_name=name, username=\n username, email=email, password=password)\n u = authenticate(username=username, password=password)\n if u is not None:\n print('authenticated')\n login(request, u)\n request.session['id'] = user.id\n return redirect('user')\n else:\n redirect('/')\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\ndef user(request):\n if request.method == 'GET':\n try:\n uid = request.GET['id']\n except:\n uid = request.session['id']\n user = User.objects.get(pk=int(uid))\n genre = Genres.objects.all()\n fbook = UserBook.objects.filter(user=user)\n genre_list = []\n for i in fbook:\n if i.book.genre.id in genre_list:\n pass\n else:\n genre_list.append(i.book.genre.id)\n if len(genre_list) != 0:\n number = 5 // len(genre_list)\n isselected = 1\n recbook = set()\n for i in genre_list:\n book = Books.objects.filter(genre=int(i)).order_by('-rating')\n while len(recbook) < 5:\n if len(book) >= number:\n for k in range(0, number):\n recbook.add(book[k])\n else:\n for k in range(0, len(book)):\n recbook.add(book[k])\n break\n else:\n isselected = 0\n recbook = ''\n return render(request, 'home/user.html', {'user': user, 'genre':\n genre, 'fbook': fbook, 'recbook': recbook, 'isset': isselected})\n else:\n user = User.objects.get(pk=int(request.session['id']))\n book = request.POST['book']\n userbook = UserBook(user=user, book=Books.objects.get(pk=int(book)))\n userbook.save()\n return redirect('user')\n\n\ndef genre(request):\n if request.method == 'GET':\n id = request.GET['id']\n books = Books.objects.filter(genre=id)\n return render(request, 'home/genre.html', {'books': books})\n\n\ndef book(request):\n if request.method == 'GET':\n id = request.GET['id']\n book = Books.objects.get(pk=int(id))\n if UserBook.objects.filter(user=User.objects.get(pk=int(request.\n session['id'])), book=book).exists():\n follow = 1\n else:\n follow = 0\n comment = UserCommentBook.objects.filter(book=book)\n return render(request, 'home/book.html', {'book': book, 'comment':\n comment, 'follow': follow})\n else:\n comment = request.POST['comment']\n book = request.POST['id']\n comment = UserCommentBook(user=User.objects.get(pk=int(request.\n session['id'])), book=Books.objects.get(pk=int(book)), 
comment=\n comment)\n comment.save()\n return redirect('book/?id=' + str(book))\n\n\ndef logout_view(request):\n logout(request)\n return redirect('/')\n",
"<import token>\n\n\ndef home(request):\n if request.method == 'GET':\n daily_users = User.objects.filter(date_joined__contains=date.today()\n ).count()\n return render(request, 'home/home.html', {'users': daily_users})\n\n\ndef register(request):\n if request.method == 'GET':\n return render(request, 'home/home.html')\n else:\n name = request.POST['name']\n username = request.POST['uname']\n email = request.POST['email']\n password = request.POST['password']\n if name and username and email and password:\n if not User.objects.filter(username=username).exists():\n user = User.objects.create_user(first_name=name, username=\n username, email=email, password=password)\n u = authenticate(username=username, password=password)\n if u is not None:\n print('authenticated')\n login(request, u)\n request.session['id'] = user.id\n return redirect('user')\n else:\n redirect('/')\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\ndef user(request):\n if request.method == 'GET':\n try:\n uid = request.GET['id']\n except:\n uid = request.session['id']\n user = User.objects.get(pk=int(uid))\n genre = Genres.objects.all()\n fbook = UserBook.objects.filter(user=user)\n genre_list = []\n for i in fbook:\n if i.book.genre.id in genre_list:\n pass\n else:\n genre_list.append(i.book.genre.id)\n if len(genre_list) != 0:\n number = 5 // len(genre_list)\n isselected = 1\n recbook = set()\n for i in genre_list:\n book = Books.objects.filter(genre=int(i)).order_by('-rating')\n while len(recbook) < 5:\n if len(book) >= number:\n for k in range(0, number):\n recbook.add(book[k])\n else:\n for k in range(0, len(book)):\n recbook.add(book[k])\n break\n else:\n isselected = 0\n recbook = ''\n return render(request, 'home/user.html', {'user': user, 'genre':\n genre, 'fbook': fbook, 'recbook': recbook, 'isset': isselected})\n else:\n user = User.objects.get(pk=int(request.session['id']))\n book = request.POST['book']\n userbook = UserBook(user=user, book=Books.objects.get(pk=int(book)))\n userbook.save()\n return redirect('user')\n\n\ndef genre(request):\n if request.method == 'GET':\n id = request.GET['id']\n books = Books.objects.filter(genre=id)\n return render(request, 'home/genre.html', {'books': books})\n\n\ndef book(request):\n if request.method == 'GET':\n id = request.GET['id']\n book = Books.objects.get(pk=int(id))\n if UserBook.objects.filter(user=User.objects.get(pk=int(request.\n session['id'])), book=book).exists():\n follow = 1\n else:\n follow = 0\n comment = UserCommentBook.objects.filter(book=book)\n return render(request, 'home/book.html', {'book': book, 'comment':\n comment, 'follow': follow})\n else:\n comment = request.POST['comment']\n book = request.POST['id']\n comment = UserCommentBook(user=User.objects.get(pk=int(request.\n session['id'])), book=Books.objects.get(pk=int(book)), comment=\n comment)\n comment.save()\n return redirect('book/?id=' + str(book))\n\n\ndef logout_view(request):\n logout(request)\n return redirect('/')\n",
"<import token>\n\n\ndef home(request):\n if request.method == 'GET':\n daily_users = User.objects.filter(date_joined__contains=date.today()\n ).count()\n return render(request, 'home/home.html', {'users': daily_users})\n\n\ndef register(request):\n if request.method == 'GET':\n return render(request, 'home/home.html')\n else:\n name = request.POST['name']\n username = request.POST['uname']\n email = request.POST['email']\n password = request.POST['password']\n if name and username and email and password:\n if not User.objects.filter(username=username).exists():\n user = User.objects.create_user(first_name=name, username=\n username, email=email, password=password)\n u = authenticate(username=username, password=password)\n if u is not None:\n print('authenticated')\n login(request, u)\n request.session['id'] = user.id\n return redirect('user')\n else:\n redirect('/')\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\ndef user(request):\n if request.method == 'GET':\n try:\n uid = request.GET['id']\n except:\n uid = request.session['id']\n user = User.objects.get(pk=int(uid))\n genre = Genres.objects.all()\n fbook = UserBook.objects.filter(user=user)\n genre_list = []\n for i in fbook:\n if i.book.genre.id in genre_list:\n pass\n else:\n genre_list.append(i.book.genre.id)\n if len(genre_list) != 0:\n number = 5 // len(genre_list)\n isselected = 1\n recbook = set()\n for i in genre_list:\n book = Books.objects.filter(genre=int(i)).order_by('-rating')\n while len(recbook) < 5:\n if len(book) >= number:\n for k in range(0, number):\n recbook.add(book[k])\n else:\n for k in range(0, len(book)):\n recbook.add(book[k])\n break\n else:\n isselected = 0\n recbook = ''\n return render(request, 'home/user.html', {'user': user, 'genre':\n genre, 'fbook': fbook, 'recbook': recbook, 'isset': isselected})\n else:\n user = User.objects.get(pk=int(request.session['id']))\n book = request.POST['book']\n userbook = UserBook(user=user, book=Books.objects.get(pk=int(book)))\n userbook.save()\n return redirect('user')\n\n\ndef genre(request):\n if request.method == 'GET':\n id = request.GET['id']\n books = Books.objects.filter(genre=id)\n return render(request, 'home/genre.html', {'books': books})\n\n\n<function token>\n\n\ndef logout_view(request):\n logout(request)\n return redirect('/')\n",
"<import token>\n\n\ndef home(request):\n if request.method == 'GET':\n daily_users = User.objects.filter(date_joined__contains=date.today()\n ).count()\n return render(request, 'home/home.html', {'users': daily_users})\n\n\n<function token>\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\ndef user(request):\n if request.method == 'GET':\n try:\n uid = request.GET['id']\n except:\n uid = request.session['id']\n user = User.objects.get(pk=int(uid))\n genre = Genres.objects.all()\n fbook = UserBook.objects.filter(user=user)\n genre_list = []\n for i in fbook:\n if i.book.genre.id in genre_list:\n pass\n else:\n genre_list.append(i.book.genre.id)\n if len(genre_list) != 0:\n number = 5 // len(genre_list)\n isselected = 1\n recbook = set()\n for i in genre_list:\n book = Books.objects.filter(genre=int(i)).order_by('-rating')\n while len(recbook) < 5:\n if len(book) >= number:\n for k in range(0, number):\n recbook.add(book[k])\n else:\n for k in range(0, len(book)):\n recbook.add(book[k])\n break\n else:\n isselected = 0\n recbook = ''\n return render(request, 'home/user.html', {'user': user, 'genre':\n genre, 'fbook': fbook, 'recbook': recbook, 'isset': isselected})\n else:\n user = User.objects.get(pk=int(request.session['id']))\n book = request.POST['book']\n userbook = UserBook(user=user, book=Books.objects.get(pk=int(book)))\n userbook.save()\n return redirect('user')\n\n\ndef genre(request):\n if request.method == 'GET':\n id = request.GET['id']\n books = Books.objects.filter(genre=id)\n return render(request, 'home/genre.html', {'books': books})\n\n\n<function token>\n\n\ndef logout_view(request):\n logout(request)\n return redirect('/')\n",
"<import token>\n\n\ndef home(request):\n if request.method == 'GET':\n daily_users = User.objects.filter(date_joined__contains=date.today()\n ).count()\n return render(request, 'home/home.html', {'users': daily_users})\n\n\n<function token>\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\ndef user(request):\n if request.method == 'GET':\n try:\n uid = request.GET['id']\n except:\n uid = request.session['id']\n user = User.objects.get(pk=int(uid))\n genre = Genres.objects.all()\n fbook = UserBook.objects.filter(user=user)\n genre_list = []\n for i in fbook:\n if i.book.genre.id in genre_list:\n pass\n else:\n genre_list.append(i.book.genre.id)\n if len(genre_list) != 0:\n number = 5 // len(genre_list)\n isselected = 1\n recbook = set()\n for i in genre_list:\n book = Books.objects.filter(genre=int(i)).order_by('-rating')\n while len(recbook) < 5:\n if len(book) >= number:\n for k in range(0, number):\n recbook.add(book[k])\n else:\n for k in range(0, len(book)):\n recbook.add(book[k])\n break\n else:\n isselected = 0\n recbook = ''\n return render(request, 'home/user.html', {'user': user, 'genre':\n genre, 'fbook': fbook, 'recbook': recbook, 'isset': isselected})\n else:\n user = User.objects.get(pk=int(request.session['id']))\n book = request.POST['book']\n userbook = UserBook(user=user, book=Books.objects.get(pk=int(book)))\n userbook.save()\n return redirect('user')\n\n\ndef genre(request):\n if request.method == 'GET':\n id = request.GET['id']\n books = Books.objects.filter(genre=id)\n return render(request, 'home/genre.html', {'books': books})\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\ndef home(request):\n if request.method == 'GET':\n daily_users = User.objects.filter(date_joined__contains=date.today()\n ).count()\n return render(request, 'home/home.html', {'users': daily_users})\n\n\n<function token>\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\n<function token>\n\n\ndef genre(request):\n if request.method == 'GET':\n id = request.GET['id']\n books = Books.objects.filter(genre=id)\n return render(request, 'home/genre.html', {'books': books})\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\ndef home(request):\n if request.method == 'GET':\n daily_users = User.objects.filter(date_joined__contains=date.today()\n ).count()\n return render(request, 'home/home.html', {'users': daily_users})\n\n\n<function token>\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n\n\ndef login_view(request):\n if request.method == 'GET':\n if 'id' in request.session:\n return redirect('user')\n return render(request, 'home/login.html')\n else:\n username = request.POST['uname']\n password = request.POST['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n print('user active')\n login(request, user)\n request.session['id'] = User.objects.filter(username=username\n ).values('id')[0]['id']\n return redirect('user')\n else:\n return render(request, 'home/home.html')\n else:\n return redirect('/')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
810 |
0ed99037d7ff708b7931fbc3553b1aeb19a20f53
|
'''
* @file IntQueue.py
* @author (original JAVA) William Fiset, [email protected]
* liujingkun, [email protected]
* (conversion to Python) Armin Zare Zadeh, [email protected]
* @date 23 Jun 2020
* @version 0.1
* @brief This file contains an implementation of an integer only queue.
*
'''
import time
from array import array as arr
from collections import deque
from Queue import Queue
class IntQueue(Queue):
'''
An integer only implementation of a queue
'''
def __init__(self, maxSize):
"""
maxSize is the maximum number of items
that can be in the queue at any given time
"""
self.front = 0
self.end = 0
self.qSize = 0
self.data = arr('i', (0 for i in range(maxSize)))
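        # front and end index into this fixed-size array, wrapping around to form a circular buffer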
def isEmpty(self):
"""
Return true/false on whether the queue is empty
"""
return self.qSize == 0
def size(self):
"""
Return the number of elements inside the queue
"""
return self.qSize
def peek(self):
if self.isEmpty():
raise Exception('Queue is empty')
self.front = self.front % len(self.data)
return self.data[self.front]
def isFull(self):
return self.qSize == len(self.data)
def offer(self, value):
"""
Add an element to the queue
"""
if self.isFull():
            raise Exception("Queue is full!")
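        # write at the end index, then advance it and wrap around the buffer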
self.data[self.end] = value
self.end += 1
self.qSize += 1
self.end = self.end % len(self.data)
def poll(self):
"""
        Make sure you check that the queue is not empty before calling poll!
"""
if self.isEmpty():
raise Exception('Queue is empty')
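        # read the element at the front index, then advance it; the modulo keeps it inside the buffer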
self.qSize -= 1
self.front = self.front % len(self.data)
d = self.data[self.front]
self.front += 1
return d
def benchMarkTest():
"""
BenchMark IntQueue vs ArrayDeque.
"""
n = 10000000
intQ = IntQueue(n)
# IntQueue times at around 12.109375 seconds
start = time.process_time()
for i in range(0, n):
intQ.offer(i)
for i in range(0, n):
intQ.poll()
end = time.process_time()
print("IntQueue Time: ", (end - start))
# ArrayDeque times at around 1.1875 seconds
arrayDeque = deque()
start = time.process_time()
for i in range(0, n):
arrayDeque.append(i)
for i in range(0, n):
arrayDeque.popleft()
end = time.process_time()
print("ArrayDeque Time: ", (end - start))
if __name__ == '__main__':
"""
Example usage
"""
q = IntQueue(5)
q.offer(1)
q.offer(2)
q.offer(3)
q.offer(4)
q.offer(5)
print(q.poll()) # 1
print(q.poll()) # 2
print(q.poll()) # 3
print(q.poll()) # 4
print(q.isEmpty()) # false
    q.offer(1)
    q.offer(2)
    q.offer(3)
print(q.poll()) # 5
print(q.poll()) # 1
print(q.poll()) # 2
print(q.poll()) # 3
print(q.isEmpty()) # true
benchMarkTest()
|
[
"'''\n * @file IntQueue.py\n * @author (original JAVA) William Fiset, [email protected]\n * liujingkun, [email protected]\n * (conversion to Python) Armin Zare Zadeh, [email protected]\n * @date 23 Jun 2020\n * @version 0.1\n * @brief This file contains an implementation of an integer only queue.\n * \n'''\n\nimport time\nfrom array import array as arr\nfrom collections import deque\nfrom Queue import Queue\n\n\nclass IntQueue(Queue):\n ''' \n An integer only implementation of a queue\n '''\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\" \n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\" \n return self.qSize\n\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n \n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception(\"Queue too small!\")\n \n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n \n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\n\ndef benchMarkTest():\n \"\"\"\n BenchMark IntQueue vs ArrayDeque.\n \"\"\" \n\n n = 10000000\n intQ = IntQueue(n)\n\n # IntQueue times at around 12.109375 seconds\n start = time.process_time()\n for i in range(0, n):\n intQ.offer(i)\n for i in range(0, n):\n intQ.poll()\n end = time.process_time()\n print(\"IntQueue Time: \", (end - start))\n\n # ArrayDeque times at around 1.1875 seconds\n arrayDeque = deque()\n start = time.process_time()\n for i in range(0, n):\n arrayDeque.append(i)\n for i in range(0, n):\n arrayDeque.popleft()\n end = time.process_time()\n print(\"ArrayDeque Time: \", (end - start))\n\n\n\nif __name__ == '__main__':\n \"\"\"\n Example usage\n \"\"\"\n\n q = IntQueue(5)\n\n q.offer(1)\n q.offer(2)\n q.offer(3)\n q.offer(4)\n q.offer(5)\n\n print(q.poll()) # 1\n print(q.poll()) # 2\n print(q.poll()) # 3\n print(q.poll()) # 4\n\n print(q.isEmpty()) # false\n\n q.offer(1);\n q.offer(2);\n q.offer(3);\n\n print(q.poll()) # 5\n print(q.poll()) # 1\n print(q.poll()) # 2\n print(q.poll()) # 3\n\n print(q.isEmpty()) # true\n\n benchMarkTest()\n",
"<docstring token>\nimport time\nfrom array import array as arr\nfrom collections import deque\nfrom Queue import Queue\n\n\nclass IntQueue(Queue):\n \"\"\" \n An integer only implementation of a queue\n \"\"\"\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\ndef benchMarkTest():\n \"\"\"\n BenchMark IntQueue vs ArrayDeque.\n \"\"\"\n n = 10000000\n intQ = IntQueue(n)\n start = time.process_time()\n for i in range(0, n):\n intQ.offer(i)\n for i in range(0, n):\n intQ.poll()\n end = time.process_time()\n print('IntQueue Time: ', end - start)\n arrayDeque = deque()\n start = time.process_time()\n for i in range(0, n):\n arrayDeque.append(i)\n for i in range(0, n):\n arrayDeque.popleft()\n end = time.process_time()\n print('ArrayDeque Time: ', end - start)\n\n\nif __name__ == '__main__':\n \"\"\"\n Example usage\n \"\"\"\n q = IntQueue(5)\n q.offer(1)\n q.offer(2)\n q.offer(3)\n q.offer(4)\n q.offer(5)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n q.offer(1)\n q.offer(2)\n q.offer(3)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n benchMarkTest()\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n \"\"\" \n An integer only implementation of a queue\n \"\"\"\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\ndef benchMarkTest():\n \"\"\"\n BenchMark IntQueue vs ArrayDeque.\n \"\"\"\n n = 10000000\n intQ = IntQueue(n)\n start = time.process_time()\n for i in range(0, n):\n intQ.offer(i)\n for i in range(0, n):\n intQ.poll()\n end = time.process_time()\n print('IntQueue Time: ', end - start)\n arrayDeque = deque()\n start = time.process_time()\n for i in range(0, n):\n arrayDeque.append(i)\n for i in range(0, n):\n arrayDeque.popleft()\n end = time.process_time()\n print('ArrayDeque Time: ', end - start)\n\n\nif __name__ == '__main__':\n \"\"\"\n Example usage\n \"\"\"\n q = IntQueue(5)\n q.offer(1)\n q.offer(2)\n q.offer(3)\n q.offer(4)\n q.offer(5)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n q.offer(1)\n q.offer(2)\n q.offer(3)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n benchMarkTest()\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n \"\"\" \n An integer only implementation of a queue\n \"\"\"\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\ndef benchMarkTest():\n \"\"\"\n BenchMark IntQueue vs ArrayDeque.\n \"\"\"\n n = 10000000\n intQ = IntQueue(n)\n start = time.process_time()\n for i in range(0, n):\n intQ.offer(i)\n for i in range(0, n):\n intQ.poll()\n end = time.process_time()\n print('IntQueue Time: ', end - start)\n arrayDeque = deque()\n start = time.process_time()\n for i in range(0, n):\n arrayDeque.append(i)\n for i in range(0, n):\n arrayDeque.popleft()\n end = time.process_time()\n print('ArrayDeque Time: ', end - start)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n \"\"\" \n An integer only implementation of a queue\n \"\"\"\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n <function token>\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n <function token>\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n <function token>\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n <function token>\n <function token>\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n <function token>\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n <function token>\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n <function token>\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n\n\nclass IntQueue(Queue):\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n",
"<docstring token>\n<import token>\n<class token>\n<function token>\n<code token>\n"
] | false |
811 |
acff8618754658104ac36214901d346447a0134f
|
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
import paho.mqtt.client as mqtt
# Fetch the service account key JSON file contents
cred = credentials.Certificate('iot_mikro.json')
# Initialize the app with a service account, granting admin privileges
firebase_admin.initialize_app(cred, {
'databaseURL': 'https://mikro-b4844.firebaseio.com/'
})
ref = db.reference('lampu')
print(ref.get())
i=0
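# i tracks the last published state so each Off/On transition is published to MQTT only once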
while True:
print(ref.get())
if ref.get()=="Off" and i==0 :
i=1
client = mqtt.Client()
client.connect("127.0.0.1",1883,60)
client.publish("building/lampu", "Off")
if ref.get()=="On" and i==1 :
i=0
client = mqtt.Client()
client.connect("127.0.0.1",1883,60)
client.publish("building/lampu", "On")
# client.disconnect();
|
[
"import firebase_admin\nfrom firebase_admin import credentials\nfrom firebase_admin import db\nimport paho.mqtt.client as mqtt\n\n# Fetch the service account key JSON file contents\ncred = credentials.Certificate('iot_mikro.json')\n# Initialize the app with a service account, granting admin privileges\nfirebase_admin.initialize_app(cred, {\n 'databaseURL': 'https://mikro-b4844.firebaseio.com/'\n})\n\nref = db.reference('lampu')\nprint(ref.get())\ni=0\nwhile True:\n print(ref.get())\n if ref.get()==\"Off\" and i==0 :\n i=1\n client = mqtt.Client()\n client.connect(\"127.0.0.1\",1883,60)\n client.publish(\"building/lampu\", \"Off\")\n if ref.get()==\"On\" and i==1 :\n i=0\n client = mqtt.Client()\n client.connect(\"127.0.0.1\",1883,60)\n client.publish(\"building/lampu\", \"On\")\n# client.disconnect();\n ",
"import firebase_admin\nfrom firebase_admin import credentials\nfrom firebase_admin import db\nimport paho.mqtt.client as mqtt\ncred = credentials.Certificate('iot_mikro.json')\nfirebase_admin.initialize_app(cred, {'databaseURL':\n 'https://mikro-b4844.firebaseio.com/'})\nref = db.reference('lampu')\nprint(ref.get())\ni = 0\nwhile True:\n print(ref.get())\n if ref.get() == 'Off' and i == 0:\n i = 1\n client = mqtt.Client()\n client.connect('127.0.0.1', 1883, 60)\n client.publish('building/lampu', 'Off')\n if ref.get() == 'On' and i == 1:\n i = 0\n client = mqtt.Client()\n client.connect('127.0.0.1', 1883, 60)\n client.publish('building/lampu', 'On')\n",
"<import token>\ncred = credentials.Certificate('iot_mikro.json')\nfirebase_admin.initialize_app(cred, {'databaseURL':\n 'https://mikro-b4844.firebaseio.com/'})\nref = db.reference('lampu')\nprint(ref.get())\ni = 0\nwhile True:\n print(ref.get())\n if ref.get() == 'Off' and i == 0:\n i = 1\n client = mqtt.Client()\n client.connect('127.0.0.1', 1883, 60)\n client.publish('building/lampu', 'Off')\n if ref.get() == 'On' and i == 1:\n i = 0\n client = mqtt.Client()\n client.connect('127.0.0.1', 1883, 60)\n client.publish('building/lampu', 'On')\n",
"<import token>\n<assignment token>\nfirebase_admin.initialize_app(cred, {'databaseURL':\n 'https://mikro-b4844.firebaseio.com/'})\n<assignment token>\nprint(ref.get())\n<assignment token>\nwhile True:\n print(ref.get())\n if ref.get() == 'Off' and i == 0:\n i = 1\n client = mqtt.Client()\n client.connect('127.0.0.1', 1883, 60)\n client.publish('building/lampu', 'Off')\n if ref.get() == 'On' and i == 1:\n i = 0\n client = mqtt.Client()\n client.connect('127.0.0.1', 1883, 60)\n client.publish('building/lampu', 'On')\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
812 |
077b6d3d7417bbc26e9f23af6f437ff05e3d5771
|
__author__ = "那位先生Beer"
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
import xlrd
import numpy as np
print('输入鲈鱼的先验概率例如:70,对应70%')
a=input('输入鲈鱼的先验概率(鲑鱼对应的1减去剩余的):')
font_set = FontProperties(fname=r"c:\windows\fonts\simsun.ttc", size=15)
# Plot the generated data (x axis: length, y axis: lightness)
data=xlrd.open_workbook('xqtest.xls')
shxrange=range(data.nsheets)
sh=data.sheet_by_name("1")
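# L: sea bass samples (the first int(a)*50 rows); G: salmon samples (rows starting at 5000)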
L=[]
for i in range(0,(int(a))*50):
rowa_data=sh.row_values(i)
L.append(rowa_data)
L=np.array(L)
L=L[:,0:2]
G=[]
for j in range(5000,5000+(100-int(a))*50):
rowa_data = sh.row_values(j)
G.append(rowa_data)
G=np.array(G)
G=G[:,0:2]
plt.figure(figsize=(8,6))
plt.title("生成的鲈鱼和鲑鱼数据的散点图",fontproperties=font_set)
plt.xlabel("长度",fontproperties=font_set)
plt.ylabel("宽度",fontproperties=font_set)
plt.scatter(L[:,0],L[:,1],marker="o",label="鲈鱼")
plt.scatter(G[:,0],G[:,1],marker="s",label="鲑鱼")
# Classification model
x = np.linspace(0,8)
y = -x+9
plt.plot(x,y, color="red")
plt.legend()
plt.show()
# The simulated sea bass are relatively small, so they fall below the line, i.e. y+x<=9:
# Compute the accuracy
count=0
for i in L:
if i[0]+i[1]<=9:
count=count+1
q=(count/((int(a))*50))
print('鲈鱼准确率:%s'%(count/((int(a))*50)))
countG=0
for i in G:
if i[0]+i[1]>=9:
countG=countG+1
p=(countG/((100-int(a))*50))
print('鲑鱼准确率:%s'%(countG/((100-int(a))*50)))
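# Combine the prior with the per-class accuracies via Bayes' rule: the final print gives p(a|b) = p(ab) / p(b)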
#p(b)=p(b|a)*p(a) + p(b|-a)p(-a)
pb=(int(a)/100)*q + (1-(int(a)/100))*p
print(pb)
#p(ab)=p(b|a)*p(a)
pab=(int(a)/100)*q
print(pab)
print(pab/pb)
|
[
"__author__ = \"那位先生Beer\"\nimport matplotlib.pyplot as plt\nfrom matplotlib.font_manager import FontProperties\nimport xlrd\nimport numpy as np\nprint('输入鲈鱼的先验概率例如:70,对应70%')\na=input('输入鲈鱼的先验概率(鲑鱼对应的1减去剩余的):')\nfont_set = FontProperties(fname=r\"c:\\windows\\fonts\\simsun.ttc\", size=15)\n#根据生成的数据画出图像(横坐标为长度,纵坐标为亮度)\ndata=xlrd.open_workbook('xqtest.xls')\nshxrange=range(data.nsheets)\nsh=data.sheet_by_name(\"1\")\nL=[]\nfor i in range(0,(int(a))*50):\n rowa_data=sh.row_values(i)\n L.append(rowa_data)\nL=np.array(L)\nL=L[:,0:2]\n\nG=[]\nfor j in range(5000,5000+(100-int(a))*50):\n rowa_data = sh.row_values(j)\n G.append(rowa_data)\nG=np.array(G)\nG=G[:,0:2]\nplt.figure(figsize=(8,6))\nplt.title(\"生成的鲈鱼和鲑鱼数据的散点图\",fontproperties=font_set)\nplt.xlabel(\"长度\",fontproperties=font_set)\nplt.ylabel(\"宽度\",fontproperties=font_set)\nplt.scatter(L[:,0],L[:,1],marker=\"o\",label=\"鲈鱼\")\nplt.scatter(G[:,0],G[:,1],marker=\"s\",label=\"鲑鱼\")\n# 分类模型\nx = np.linspace(0,8)\ny = -x+9\nplt.plot(x,y, color=\"red\")\nplt.legend()\nplt.show()\n\n\n#模拟的数据鲈鱼比较小,可得出其在直线下面,即y+x<=9:\n#计算准确率\ncount=0\nfor i in L:\n if i[0]+i[1]<=9:\n count=count+1\nq=(count/((int(a))*50))\nprint('鲈鱼准确率:%s'%(count/((int(a))*50)))\ncountG=0\nfor i in G:\n if i[0]+i[1]>=9:\n countG=countG+1\np=(countG/((100-int(a))*50))\nprint('鲑鱼准确率:%s'%(countG/((100-int(a))*50)))\n\n#p(b)=p(b|a)*p(a) + p(b|-a)p(-a)\npb=(int(a)/100)*q + (1-(int(a)/100))*p\nprint(pb)\n#p(ab)=p(b|a)*p(a)\npab=(int(a)/100)*q\nprint(pab)\nprint(pab/pb)\n",
"__author__ = '那位先生Beer'\nimport matplotlib.pyplot as plt\nfrom matplotlib.font_manager import FontProperties\nimport xlrd\nimport numpy as np\nprint('输入鲈鱼的先验概率例如:70,对应70%')\na = input('输入鲈鱼的先验概率(鲑鱼对应的1减去剩余的):')\nfont_set = FontProperties(fname='c:\\\\windows\\\\fonts\\\\simsun.ttc', size=15)\ndata = xlrd.open_workbook('xqtest.xls')\nshxrange = range(data.nsheets)\nsh = data.sheet_by_name('1')\nL = []\nfor i in range(0, int(a) * 50):\n rowa_data = sh.row_values(i)\n L.append(rowa_data)\nL = np.array(L)\nL = L[:, 0:2]\nG = []\nfor j in range(5000, 5000 + (100 - int(a)) * 50):\n rowa_data = sh.row_values(j)\n G.append(rowa_data)\nG = np.array(G)\nG = G[:, 0:2]\nplt.figure(figsize=(8, 6))\nplt.title('生成的鲈鱼和鲑鱼数据的散点图', fontproperties=font_set)\nplt.xlabel('长度', fontproperties=font_set)\nplt.ylabel('宽度', fontproperties=font_set)\nplt.scatter(L[:, 0], L[:, 1], marker='o', label='鲈鱼')\nplt.scatter(G[:, 0], G[:, 1], marker='s', label='鲑鱼')\nx = np.linspace(0, 8)\ny = -x + 9\nplt.plot(x, y, color='red')\nplt.legend()\nplt.show()\ncount = 0\nfor i in L:\n if i[0] + i[1] <= 9:\n count = count + 1\nq = count / (int(a) * 50)\nprint('鲈鱼准确率:%s' % (count / (int(a) * 50)))\ncountG = 0\nfor i in G:\n if i[0] + i[1] >= 9:\n countG = countG + 1\np = countG / ((100 - int(a)) * 50)\nprint('鲑鱼准确率:%s' % (countG / ((100 - int(a)) * 50)))\npb = int(a) / 100 * q + (1 - int(a) / 100) * p\nprint(pb)\npab = int(a) / 100 * q\nprint(pab)\nprint(pab / pb)\n",
"__author__ = '那位先生Beer'\n<import token>\nprint('输入鲈鱼的先验概率例如:70,对应70%')\na = input('输入鲈鱼的先验概率(鲑鱼对应的1减去剩余的):')\nfont_set = FontProperties(fname='c:\\\\windows\\\\fonts\\\\simsun.ttc', size=15)\ndata = xlrd.open_workbook('xqtest.xls')\nshxrange = range(data.nsheets)\nsh = data.sheet_by_name('1')\nL = []\nfor i in range(0, int(a) * 50):\n rowa_data = sh.row_values(i)\n L.append(rowa_data)\nL = np.array(L)\nL = L[:, 0:2]\nG = []\nfor j in range(5000, 5000 + (100 - int(a)) * 50):\n rowa_data = sh.row_values(j)\n G.append(rowa_data)\nG = np.array(G)\nG = G[:, 0:2]\nplt.figure(figsize=(8, 6))\nplt.title('生成的鲈鱼和鲑鱼数据的散点图', fontproperties=font_set)\nplt.xlabel('长度', fontproperties=font_set)\nplt.ylabel('宽度', fontproperties=font_set)\nplt.scatter(L[:, 0], L[:, 1], marker='o', label='鲈鱼')\nplt.scatter(G[:, 0], G[:, 1], marker='s', label='鲑鱼')\nx = np.linspace(0, 8)\ny = -x + 9\nplt.plot(x, y, color='red')\nplt.legend()\nplt.show()\ncount = 0\nfor i in L:\n if i[0] + i[1] <= 9:\n count = count + 1\nq = count / (int(a) * 50)\nprint('鲈鱼准确率:%s' % (count / (int(a) * 50)))\ncountG = 0\nfor i in G:\n if i[0] + i[1] >= 9:\n countG = countG + 1\np = countG / ((100 - int(a)) * 50)\nprint('鲑鱼准确率:%s' % (countG / ((100 - int(a)) * 50)))\npb = int(a) / 100 * q + (1 - int(a) / 100) * p\nprint(pb)\npab = int(a) / 100 * q\nprint(pab)\nprint(pab / pb)\n",
"<assignment token>\n<import token>\nprint('输入鲈鱼的先验概率例如:70,对应70%')\n<assignment token>\nfor i in range(0, int(a) * 50):\n rowa_data = sh.row_values(i)\n L.append(rowa_data)\n<assignment token>\nfor j in range(5000, 5000 + (100 - int(a)) * 50):\n rowa_data = sh.row_values(j)\n G.append(rowa_data)\n<assignment token>\nplt.figure(figsize=(8, 6))\nplt.title('生成的鲈鱼和鲑鱼数据的散点图', fontproperties=font_set)\nplt.xlabel('长度', fontproperties=font_set)\nplt.ylabel('宽度', fontproperties=font_set)\nplt.scatter(L[:, 0], L[:, 1], marker='o', label='鲈鱼')\nplt.scatter(G[:, 0], G[:, 1], marker='s', label='鲑鱼')\n<assignment token>\nplt.plot(x, y, color='red')\nplt.legend()\nplt.show()\n<assignment token>\nfor i in L:\n if i[0] + i[1] <= 9:\n count = count + 1\n<assignment token>\nprint('鲈鱼准确率:%s' % (count / (int(a) * 50)))\n<assignment token>\nfor i in G:\n if i[0] + i[1] >= 9:\n countG = countG + 1\n<assignment token>\nprint('鲑鱼准确率:%s' % (countG / ((100 - int(a)) * 50)))\n<assignment token>\nprint(pb)\n<assignment token>\nprint(pab)\nprint(pab / pb)\n",
"<assignment token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
813 |
0ea67ac97ec8e7f287a2430c67f8f7d841d8b646
|
# -*- coding: utf-8 -*-
# Copyright 2017 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Test SummaryModel objects."""
from oslotest import base
from cloudkitty.api.v1.datamodels import report
class TestSummary(base.BaseTestCase):
def setUp(self):
super(TestSummary, self).setUp()
def test_nulls(self):
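        # passing None everywhere should yield the defaults asserted below ("ALL" scopes, "0" rate, None bounds)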
s = report.SummaryModel(begin=None,
end=None,
tenant_id=None,
res_type=None,
rate=None)
self.assertIsNone(s.begin)
self.assertIsNone(s.end)
self.assertEqual(s.tenant_id, "ALL")
self.assertEqual(s.res_type, "ALL")
self.assertEqual(s.rate, "0")
|
[
"# -*- coding: utf-8 -*-\n# Copyright 2017 Objectif Libre\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n#\n\"\"\"Test SummaryModel objects.\"\"\"\nfrom oslotest import base\n\nfrom cloudkitty.api.v1.datamodels import report\n\n\nclass TestSummary(base.BaseTestCase):\n\n def setUp(self):\n super(TestSummary, self).setUp()\n\n def test_nulls(self):\n s = report.SummaryModel(begin=None,\n end=None,\n tenant_id=None,\n res_type=None,\n rate=None)\n self.assertIsNone(s.begin)\n self.assertIsNone(s.end)\n self.assertEqual(s.tenant_id, \"ALL\")\n self.assertEqual(s.res_type, \"ALL\")\n self.assertEqual(s.rate, \"0\")\n",
"<docstring token>\nfrom oslotest import base\nfrom cloudkitty.api.v1.datamodels import report\n\n\nclass TestSummary(base.BaseTestCase):\n\n def setUp(self):\n super(TestSummary, self).setUp()\n\n def test_nulls(self):\n s = report.SummaryModel(begin=None, end=None, tenant_id=None,\n res_type=None, rate=None)\n self.assertIsNone(s.begin)\n self.assertIsNone(s.end)\n self.assertEqual(s.tenant_id, 'ALL')\n self.assertEqual(s.res_type, 'ALL')\n self.assertEqual(s.rate, '0')\n",
"<docstring token>\n<import token>\n\n\nclass TestSummary(base.BaseTestCase):\n\n def setUp(self):\n super(TestSummary, self).setUp()\n\n def test_nulls(self):\n s = report.SummaryModel(begin=None, end=None, tenant_id=None,\n res_type=None, rate=None)\n self.assertIsNone(s.begin)\n self.assertIsNone(s.end)\n self.assertEqual(s.tenant_id, 'ALL')\n self.assertEqual(s.res_type, 'ALL')\n self.assertEqual(s.rate, '0')\n",
"<docstring token>\n<import token>\n\n\nclass TestSummary(base.BaseTestCase):\n\n def setUp(self):\n super(TestSummary, self).setUp()\n <function token>\n",
"<docstring token>\n<import token>\n\n\nclass TestSummary(base.BaseTestCase):\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<class token>\n"
] | false |
814 |
883b4de18dddede97f850e3a184a0e1072bda99e
|
# #1
# def bi_search(l, r, arr, x):
# # Code Here
# if(l == r):
# return arr[r] == x
# mid = (l + r)//2 + 1
# if(arr[mid] > x):
# return bi_search(l,mid-1,arr,x)
# else:
# return bi_search(mid,r,arr,x)
# inp = input('Enter Input : ').split('/')
# arr, k = list(map(int, inp[0].split())), int(inp[1])
# print(bi_search(0, len(arr) - 1, sorted(arr), k))
# #2
# def bi_search(l, r, arr, x):
# if(l == r):
# if arr[l] > x :
# return arr[l]
# else:
# return None
# mid = (l + r)//2 + 1
# res = None
# if(arr[mid] > x):
# res = bi_search(l,mid-1,arr,x)
# else:
# res = bi_search(mid,r,arr,x)
# return res if res else (arr[mid] if arr[mid] > x else None)
# inp = input('Enter Input : ').split('/')
# arr, arr2 = sorted(list(map(int, inp[0].split()))), list(map(int, inp[1].split()))
# for k in arr2:
# res = bi_search(0, len(arr) - 1, arr, k)
# print(res if res else "No First Greater Value")
#3
# class Data:
# def __init__(self, key, value):
# self.key = key
# self.value = value
# def __str__(self):
# return "({0}, {1})".format(self.key, self.value)
# class hash:
# def __init__(self,max,chain):
# self.data = [None for i in range(max)]
# self.limit= max
# self.chain= chain
# self.length = 0
# def code(self,a):
# return sum([ord(i) for i in a])
# def isFull(self):
# return self.length == self.limit
# def insert(self,value):
# key,val = value.split(" ")
# s = self.code(key)
# co = 0
# now = 0
# while(co <= self.chain):
# if(co != 0):
# print ("collision number",co,"at",now)
# if(co == self.chain):
# break
# now = (s + (0 if not co else co*co) ) % self.limit
# if(self.data[now] == None):
# self.data[now] = Data(key,val)
# self.length += 1
# break
# co += 1
# if(co >= self.chain):
# print("Max of collisionChain")
# def __str__(self):
# return "\n".join(list(map(str,[ "#{0} {1}".format(str(i+1),self.data[i]) for i in range( len(self.data) ) ] ) ) ) + "\n---------------------------"
# print(" ***** Fun with hashing *****")
# val,arr = input("Enter Input : ").split("/")
# h = hash(int(val.split(" ")[0]),int(val.split(" ")[1]))
# arr = arr.split(",")
# for i in arr:
# h.insert(i)
# print(h)
# if(h.isFull()):
# print("This table is full !!!!!!")
# break
#4
# import math
# class Data:
# def __init__(self, value):
# self.value = value
# def __str__(self):
# return str(self.value)
# class hash:
# def __init__(self,max,chain,t):
# self.data = [None for i in range(max)]
# self.limit = max
# self.chain = chain
# self.length = 0
# self.threshold = t
# self.bu = list()
# def code(self,a):
# # return sum([ord(i) for i in a])
# return int(a)
# def isFull(self):
# return self.length == self.limit
# def findNearPrime(self):
# i = self.limit * 2
# while(True):
# c = True
# for j in range(2, int(math.sqrt(i)) + 1):
# if(not i % j):
# i += 1
# c = False
# break
# if c :
# break
# return i
# def handlerIllegal(self,co,value):
# if(self.length * 100 // self.limit >= self.threshold):
# print("****** Data over threshold - Rehash !!! ******")
# self.resize()
# self.Rehash()
# elif (co >= self.chain):
# print("****** Max collision - Rehash !!! ******")
# self.resize()
# self.Rehash()
# def resize(self):
# self.data += [None for i in range(self.findNearPrime() - self.limit)]
# self.limit = len(self.data)
# def Rehash(self):
# for i in range(self.limit):
# self.data[i] = None
# for i in self.bu:
# self.insert(i,False)
# def insert(self,value,Rehash = True):
# s = self.code(value)
# co = 0
# now = 0
# while(co <= self.chain):
# if(co != 0):
# print ("collision number",co,"at",now)
# if(co == self.chain):
# break
# now = (s + (0 if not co else co*co) ) % self.limit
# if(self.data[now] == None):
# self.data[now] = Data(value)
# if(Rehash):
# self.length += 1
# break
# co += 1
# if(Rehash):
# self.handlerIllegal(co,value)
# def addBuff(self,value):
# self.bu.append(value)
# def __str__(self):
# return "\n".join(list(map(str,[ "#{0} {1}".format(str(i+1),self.data[i]) for i in range( len(self.data) ) ] ) ) ) + "\n----------------------------------------"
# print(" ***** Rehashing *****")
# val,arr = input("Enter Input : ").split("/")
# h = hash(int(val.split(" ")[0]),int(val.split(" ")[1]),int(val.split(" ")[2]))
# arr = arr.split()
# print("Initial Table :",h,sep="\n")
# for i in arr:
# print("Add :",i)
# h.addBuff(i)
# h.insert(i)
# print(h)
# if(h.isFull()):
# print("This table is full !!!!!!")
# break
# 5
boxes = 0
ans = -1
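# Recursively try every way to split the weight list into `boxes` contiguous
# groups; `ans` tracks the smallest achievable weight of the heaviest group.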
def solve(dpArr,list,box,i):
global boxes
global ans
if(box == boxes):
s = 0
for j in list:
s += len(j)
if(s == len(dpArr)):
mx = 0
for j in list:
if(sum(j) > mx):
mx = sum(j)
if(mx < ans or ans == -1):
ans = mx
return
for j in range(1,len(dpArr) + 1):
if ( i + j > len(dpArr) ):
break
solve(dpArr,list + [dpArr[i:i + j]],box + 1 ,i + j)
inp = input("Enter Input : ")
inp, boxes = list(map(int, inp.split("/")[0].split())), int(inp.split("/")[1])
# for i in range(1,len(inp)):
# inp[i] += inp[i-1]
solve(dpArr = inp,list = [],box = 0,i = 0)
print("Minimum weigth for",boxes,"box(es) =",ans)
|
[
"# #1\n# def bi_search(l, r, arr, x):\n# # Code Here\n# if(l == r):\n# return arr[r] == x\n \n# mid = (l + r)//2 + 1\n# if(arr[mid] > x):\n# return bi_search(l,mid-1,arr,x)\n# else:\n# return bi_search(mid,r,arr,x)\n\n# inp = input('Enter Input : ').split('/')\n# arr, k = list(map(int, inp[0].split())), int(inp[1])\n# print(bi_search(0, len(arr) - 1, sorted(arr), k))\n\n# #2\n# def bi_search(l, r, arr, x):\n# if(l == r):\n# if arr[l] > x :\n# return arr[l]\n# else: \n# return None\n\n# mid = (l + r)//2 + 1\n# res = None\n# if(arr[mid] > x):\n# res = bi_search(l,mid-1,arr,x)\n# else:\n# res = bi_search(mid,r,arr,x)\n# return res if res else (arr[mid] if arr[mid] > x else None)\n\n\n# inp = input('Enter Input : ').split('/')\n# arr, arr2 = sorted(list(map(int, inp[0].split()))), list(map(int, inp[1].split()))\n# for k in arr2:\n# res = bi_search(0, len(arr) - 1, arr, k) \n# print(res if res else \"No First Greater Value\")\n\n#3\n# class Data:\n# def __init__(self, key, value):\n# self.key = key\n# self.value = value\n\n# def __str__(self):\n# return \"({0}, {1})\".format(self.key, self.value)\n\n# class hash:\n\n# def __init__(self,max,chain):\n# self.data = [None for i in range(max)]\n# self.limit= max\n# self.chain= chain\n# self.length = 0\n\n# def code(self,a):\n# return sum([ord(i) for i in a]) \n\n# def isFull(self):\n# return self.length == self.limit\n\n# def insert(self,value):\n# key,val = value.split(\" \")\n# s = self.code(key)\n# co = 0\n# now = 0\n# while(co <= self.chain):\n# if(co != 0):\n# print (\"collision number\",co,\"at\",now)\n# if(co == self.chain):\n# break\n# now = (s + (0 if not co else co*co) ) % self.limit \n \n\n# if(self.data[now] == None):\n# self.data[now] = Data(key,val)\n# self.length += 1\n# break\n# co += 1\n\n# if(co >= self.chain):\n# print(\"Max of collisionChain\")\n\n\n# def __str__(self):\n# return \"\\n\".join(list(map(str,[ \"#{0}\t{1}\".format(str(i+1),self.data[i]) for i in range( len(self.data) ) ] ) ) ) + \"\\n---------------------------\"\n\n\n# print(\" ***** Fun with hashing *****\")\n\n# val,arr = input(\"Enter Input : \").split(\"/\")\n\n# h = hash(int(val.split(\" \")[0]),int(val.split(\" \")[1]))\n\n# arr = arr.split(\",\")\n\n# for i in arr:\n# h.insert(i)\n# print(h)\n# if(h.isFull()):\n# print(\"This table is full !!!!!!\")\n# break\n\n\n#4\n# import math\n# class Data:\n# def __init__(self, value):\n# self.value = value\n\n# def __str__(self):\n# return str(self.value)\n\n# class hash:\n\n# def __init__(self,max,chain,t):\n# self.data = [None for i in range(max)]\n# self.limit = max\n# self.chain = chain\n# self.length = 0\n# self.threshold = t\n# self.bu = list()\n\n# def code(self,a):\n# # return sum([ord(i) for i in a]) \n# return int(a)\n\n# def isFull(self):\n# return self.length == self.limit\n\n# def findNearPrime(self):\n# i = self.limit * 2\n# while(True):\n# c = True\n# for j in range(2, int(math.sqrt(i)) + 1):\n# if(not i % j):\n# i += 1\n# c = False\n# break\n# if c :\n# break\n\n# return i\n\n# def handlerIllegal(self,co,value):\n# if(self.length * 100 // self.limit >= self.threshold):\n# print(\"****** Data over threshold - Rehash !!! ******\")\n# self.resize()\n# self.Rehash()\n# elif (co >= self.chain):\n# print(\"****** Max collision - Rehash !!! 
******\")\n# self.resize()\n# self.Rehash()\n\n# def resize(self):\n# self.data += [None for i in range(self.findNearPrime() - self.limit)]\n# self.limit = len(self.data)\n\n# def Rehash(self):\n# for i in range(self.limit):\n# self.data[i] = None\n# for i in self.bu:\n# self.insert(i,False)\n\n# def insert(self,value,Rehash = True):\n# s = self.code(value)\n# co = 0\n# now = 0\n# while(co <= self.chain):\n# if(co != 0):\n# print (\"collision number\",co,\"at\",now)\n# if(co == self.chain):\n# break\n# now = (s + (0 if not co else co*co) ) % self.limit \n\n# if(self.data[now] == None):\n# self.data[now] = Data(value)\n# if(Rehash):\n# self.length += 1\n# break\n# co += 1\n\n# if(Rehash):\n# self.handlerIllegal(co,value)\n\n# def addBuff(self,value):\n# self.bu.append(value)\n\n# def __str__(self):\n# return \"\\n\".join(list(map(str,[ \"#{0}\t{1}\".format(str(i+1),self.data[i]) for i in range( len(self.data) ) ] ) ) ) + \"\\n----------------------------------------\"\n\n\n# print(\" ***** Rehashing *****\")\n\n# val,arr = input(\"Enter Input : \").split(\"/\")\n\n# h = hash(int(val.split(\" \")[0]),int(val.split(\" \")[1]),int(val.split(\" \")[2]))\n\n# arr = arr.split()\n\n# print(\"Initial Table :\",h,sep=\"\\n\")\n\n# for i in arr:\n# print(\"Add :\",i)\n# h.addBuff(i)\n# h.insert(i)\n# print(h)\n# if(h.isFull()):\n# print(\"This table is full !!!!!!\")\n# break\n\n\n# 5\nboxes = 0\nans = -1\ndef solve(dpArr,list,box,i):\n global boxes \n global ans\n if(box == boxes):\n s = 0\n for j in list:\n s += len(j)\n \n if(s == len(dpArr)):\n mx = 0\n for j in list:\n if(sum(j) > mx):\n mx = sum(j)\n\n if(mx < ans or ans == -1):\n ans = mx \n return\n\n for j in range(1,len(dpArr) + 1):\n if ( i + j > len(dpArr) ):\n break\n solve(dpArr,list + [dpArr[i:i + j]],box + 1 ,i + j)\n\n\ninp = input(\"Enter Input : \")\n\ninp,boxes = list(map(int,inp.split(\"/\")[0].split() )) , int( inp.split(\"/\")[1])\n\n# for i in range(1,len(inp)):\n# inp[i] += inp[i-1]\n\nsolve(dpArr = inp,list = [],box = 0,i = 0)\nprint(\"Minimum weigth for\",boxes,\"box(es) =\",ans)",
"boxes = 0\nans = -1\n\n\ndef solve(dpArr, list, box, i):\n global boxes\n global ans\n if box == boxes:\n s = 0\n for j in list:\n s += len(j)\n if s == len(dpArr):\n mx = 0\n for j in list:\n if sum(j) > mx:\n mx = sum(j)\n if mx < ans or ans == -1:\n ans = mx\n return\n for j in range(1, len(dpArr) + 1):\n if i + j > len(dpArr):\n break\n solve(dpArr, list + [dpArr[i:i + j]], box + 1, i + j)\n\n\ninp = input('Enter Input : ')\ninp, boxes = list(map(int, inp.split('/')[0].split())), int(inp.split('/')[1])\nsolve(dpArr=inp, list=[], box=0, i=0)\nprint('Minimum weigth for', boxes, 'box(es) =', ans)\n",
"<assignment token>\n\n\ndef solve(dpArr, list, box, i):\n global boxes\n global ans\n if box == boxes:\n s = 0\n for j in list:\n s += len(j)\n if s == len(dpArr):\n mx = 0\n for j in list:\n if sum(j) > mx:\n mx = sum(j)\n if mx < ans or ans == -1:\n ans = mx\n return\n for j in range(1, len(dpArr) + 1):\n if i + j > len(dpArr):\n break\n solve(dpArr, list + [dpArr[i:i + j]], box + 1, i + j)\n\n\n<assignment token>\nsolve(dpArr=inp, list=[], box=0, i=0)\nprint('Minimum weigth for', boxes, 'box(es) =', ans)\n",
"<assignment token>\n\n\ndef solve(dpArr, list, box, i):\n global boxes\n global ans\n if box == boxes:\n s = 0\n for j in list:\n s += len(j)\n if s == len(dpArr):\n mx = 0\n for j in list:\n if sum(j) > mx:\n mx = sum(j)\n if mx < ans or ans == -1:\n ans = mx\n return\n for j in range(1, len(dpArr) + 1):\n if i + j > len(dpArr):\n break\n solve(dpArr, list + [dpArr[i:i + j]], box + 1, i + j)\n\n\n<assignment token>\n<code token>\n",
"<assignment token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
815 |
95b75395cafc6ba9f75ecf48157421e37ced2518
|
import math
# dynamic typing: rebind a variable and inspect its type and identity.
a = 5.0
print(id(a))
a = 10
print("hello.....")
print(type(a))
print(id(a))
# locating addresses...
b = [5, 6, 7]
print(id(b))
b.append(10)
print(id(b))
# Strings...
name = input("Enter Your Name:: ") # iNPUTTING AS NAME
print(name)
print(len(name))
print(name[2])
print(name[0:3])
print(name[-2:])
# Escape Sequence
# \'
# \"
# \\
# \n
message = 'Python "Programming"'
print(message)
message = """Python
New Line..
Programmin"""
print(message)
# string Concatenation
lastname = input("Enter Your Last Name:: ")  # inputting the last name
print(lastname)
print(name + " " + lastname)
full = f"{name} {lastname}"
print("Another way of writing... \n" + full)
print(full.upper()) # converts into upper case.
print(full.find("ip")) # finding location of specific char. Returns index number.
print("Dipesh" in full) # returns Boolean value either true or false..
print("Patel" in full)
print(full.replace("Rafaliya", "Patel"))
# Binary representation of any number...
print(bin(a)) # binary of a = 10
print(hex(a)) # Hexadecimal of a..
x = 0b0101
print(x)  # prints 5, the decimal value of 0b0101
print(bin(x))  # binary representation of x
# complex Number...
complex = a + 5j
print(complex) # printing complex number
y = 3
# operations
q = a + y # addition
print(q)
w = a - y  # subtraction
print(w)
e = a * y # multiplication
print(e)
r = a / y # division
print(r)
t = a // y # division but only print integer value
print(t)
g = a ** y # to the power of
print(g)
m = a % y # remainder
print(m)
# constants variables..
PI = 3.14 # this is a var with a constant value
print(abs(PI)) # absolute value of PI
print(round(PI)) # round up value of PI
no = -8.56
print(math.floor(no)) # floor value of no
print(math.ceil(no)) # ceiling value of no
# if-elif-else loop
age = 10
if age >= 21:
print("Adult")
elif age >= 13:
print("Teenager")
else:
print("Child")
# ternary operator
print("Adult" if age >= 21 else "Teenager")
# for loops
for p in "Dipesh":
print(p)
for l in range(0, 10, 2): # range is a kind of list...
print(l)
answer = 10
guess = 1
while answer != guess: # while loop for guessing
guess = int(input("Enter your Guess:: "))
else:
    pass  # the else clause runs once the loop condition becomes false
# defining a function ... Number is even or odd..
def evenodd(numb):
if numb % 2 == 0:
return "even"
else:
return "odd"
print("The Number is " + evenodd(20))
# printing the row at a time...
def rows(**ro):
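    # **ro collects the keyword arguments into a dict, so the whole row prints at once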
print(ro)
rows(name="Dipesh", id=1)
|
[
"import math\r\n\r\n# type defining of the variable and playing with variables.\r\na = 5.0\r\nprint(id(a))\r\na = 10\r\nprint(\"hello.....\")\r\nprint(type(a))\r\nprint(id(a))\r\n\r\n# locating addresses...\r\nb = [5, 6, 7]\r\nprint(id(b))\r\nb.append(10)\r\nprint(id(b))\r\n\r\n# Strings...\r\n\r\nname = input(\"Enter Your Name:: \") # iNPUTTING AS NAME\r\nprint(name)\r\nprint(len(name))\r\nprint(name[2])\r\nprint(name[0:3])\r\nprint(name[-2:])\r\n\r\n# Escape Sequence\r\n# \\'\r\n# \\\"\r\n# \\\\\r\n# \\n\r\nmessage = 'Python \"Programming\"'\r\nprint(message)\r\nmessage = \"\"\"Python \r\nNew Line..\r\nProgrammin\"\"\"\r\nprint(message)\r\n# string Concatenation\r\n\r\nlastname = input(\"Enter Your Last Name:: \") # iNPUTTING AS NAME\r\nprint(lastname)\r\nprint(name + \" \" + lastname)\r\n\r\nfull = f\"{name} {lastname}\"\r\nprint(\"Another way of writing... \\n\" + full)\r\nprint(full.upper()) # converts into upper case.\r\nprint(full.find(\"ip\")) # finding location of specific char. Returns index number.\r\n\r\nprint(\"Dipesh\" in full) # returns Boolean value either true or false..\r\nprint(\"Patel\" in full)\r\nprint(full.replace(\"Rafaliya\", \"Patel\"))\r\n\r\n# Binary representation of any number...\r\nprint(bin(a)) # binary of a = 10\r\nprint(hex(a)) # Hexadecimal of a..\r\n\r\nx = 0b0101\r\nprint((x)) # binary num a\r\nprint(bin(x)) # binary printing of a\r\n\r\n# complex Number...\r\ncomplex = a + 5j\r\nprint(complex) # printing complex number\r\ny = 3\r\n# operations\r\nq = a + y # addition\r\nprint(q)\r\nw = a - y # substraction\r\nprint(w)\r\ne = a * y # multiplication\r\nprint(e)\r\nr = a / y # division\r\nprint(r)\r\nt = a // y # division but only print integer value\r\nprint(t)\r\ng = a ** y # to the power of\r\nprint(g)\r\nm = a % y # remainder\r\nprint(m)\r\n\r\n# constants variables..\r\nPI = 3.14 # this is a var with a constant value\r\nprint(abs(PI)) # absolute value of PI\r\nprint(round(PI)) # round up value of PI\r\nno = -8.56\r\nprint(math.floor(no)) # floor value of no\r\nprint(math.ceil(no)) # ceiling value of no\r\n\r\n# if-elif-else loop\r\nage = 10\r\nif age >= 21:\r\n print(\"Adult\")\r\nelif age >= 13:\r\n print(\"Teenager\")\r\nelse:\r\n print(\"Child\")\r\n\r\n# ternary operator\r\nprint(\"Adult\" if age >= 21 else \"Teenager\")\r\n\r\n# for loops\r\nfor p in \"Dipesh\":\r\n print(p)\r\n\r\nfor l in range(0, 10, 2): # range is a kind of list...\r\n print(l)\r\n\r\nanswer = 10\r\nguess = 1\r\nwhile answer != guess: # while loop for guessing\r\n guess = int(input(\"Enter your Guess:: \"))\r\nelse:\r\n pass # this is used to break the loop...\r\n\r\n# defining a function ... Number is even or odd..\r\ndef evenodd(numb):\r\n if numb % 2 == 0:\r\n return \"even\"\r\n else:\r\n return \"odd\"\r\n\r\n\r\nprint(\"The Number is \" + evenodd(20))\r\n\r\n# printing the row at a time...\r\ndef rows(**ro):\r\n print(ro)\r\n\r\n\r\nrows(name=\"Dipesh\", id=1)\r\n\r\n",
"import math\na = 5.0\nprint(id(a))\na = 10\nprint('hello.....')\nprint(type(a))\nprint(id(a))\nb = [5, 6, 7]\nprint(id(b))\nb.append(10)\nprint(id(b))\nname = input('Enter Your Name:: ')\nprint(name)\nprint(len(name))\nprint(name[2])\nprint(name[0:3])\nprint(name[-2:])\nmessage = 'Python \"Programming\"'\nprint(message)\nmessage = \"\"\"Python \nNew Line..\nProgrammin\"\"\"\nprint(message)\nlastname = input('Enter Your Last Name:: ')\nprint(lastname)\nprint(name + ' ' + lastname)\nfull = f'{name} {lastname}'\nprint('Another way of writing... \\n' + full)\nprint(full.upper())\nprint(full.find('ip'))\nprint('Dipesh' in full)\nprint('Patel' in full)\nprint(full.replace('Rafaliya', 'Patel'))\nprint(bin(a))\nprint(hex(a))\nx = 5\nprint(x)\nprint(bin(x))\ncomplex = a + 5.0j\nprint(complex)\ny = 3\nq = a + y\nprint(q)\nw = a - y\nprint(w)\ne = a * y\nprint(e)\nr = a / y\nprint(r)\nt = a // y\nprint(t)\ng = a ** y\nprint(g)\nm = a % y\nprint(m)\nPI = 3.14\nprint(abs(PI))\nprint(round(PI))\nno = -8.56\nprint(math.floor(no))\nprint(math.ceil(no))\nage = 10\nif age >= 21:\n print('Adult')\nelif age >= 13:\n print('Teenager')\nelse:\n print('Child')\nprint('Adult' if age >= 21 else 'Teenager')\nfor p in 'Dipesh':\n print(p)\nfor l in range(0, 10, 2):\n print(l)\nanswer = 10\nguess = 1\nwhile answer != guess:\n guess = int(input('Enter your Guess:: '))\nelse:\n pass\n\n\ndef evenodd(numb):\n if numb % 2 == 0:\n return 'even'\n else:\n return 'odd'\n\n\nprint('The Number is ' + evenodd(20))\n\n\ndef rows(**ro):\n print(ro)\n\n\nrows(name='Dipesh', id=1)\n",
"<import token>\na = 5.0\nprint(id(a))\na = 10\nprint('hello.....')\nprint(type(a))\nprint(id(a))\nb = [5, 6, 7]\nprint(id(b))\nb.append(10)\nprint(id(b))\nname = input('Enter Your Name:: ')\nprint(name)\nprint(len(name))\nprint(name[2])\nprint(name[0:3])\nprint(name[-2:])\nmessage = 'Python \"Programming\"'\nprint(message)\nmessage = \"\"\"Python \nNew Line..\nProgrammin\"\"\"\nprint(message)\nlastname = input('Enter Your Last Name:: ')\nprint(lastname)\nprint(name + ' ' + lastname)\nfull = f'{name} {lastname}'\nprint('Another way of writing... \\n' + full)\nprint(full.upper())\nprint(full.find('ip'))\nprint('Dipesh' in full)\nprint('Patel' in full)\nprint(full.replace('Rafaliya', 'Patel'))\nprint(bin(a))\nprint(hex(a))\nx = 5\nprint(x)\nprint(bin(x))\ncomplex = a + 5.0j\nprint(complex)\ny = 3\nq = a + y\nprint(q)\nw = a - y\nprint(w)\ne = a * y\nprint(e)\nr = a / y\nprint(r)\nt = a // y\nprint(t)\ng = a ** y\nprint(g)\nm = a % y\nprint(m)\nPI = 3.14\nprint(abs(PI))\nprint(round(PI))\nno = -8.56\nprint(math.floor(no))\nprint(math.ceil(no))\nage = 10\nif age >= 21:\n print('Adult')\nelif age >= 13:\n print('Teenager')\nelse:\n print('Child')\nprint('Adult' if age >= 21 else 'Teenager')\nfor p in 'Dipesh':\n print(p)\nfor l in range(0, 10, 2):\n print(l)\nanswer = 10\nguess = 1\nwhile answer != guess:\n guess = int(input('Enter your Guess:: '))\nelse:\n pass\n\n\ndef evenodd(numb):\n if numb % 2 == 0:\n return 'even'\n else:\n return 'odd'\n\n\nprint('The Number is ' + evenodd(20))\n\n\ndef rows(**ro):\n print(ro)\n\n\nrows(name='Dipesh', id=1)\n",
"<import token>\n<assignment token>\nprint(id(a))\n<assignment token>\nprint('hello.....')\nprint(type(a))\nprint(id(a))\n<assignment token>\nprint(id(b))\nb.append(10)\nprint(id(b))\n<assignment token>\nprint(name)\nprint(len(name))\nprint(name[2])\nprint(name[0:3])\nprint(name[-2:])\n<assignment token>\nprint(message)\n<assignment token>\nprint(message)\n<assignment token>\nprint(lastname)\nprint(name + ' ' + lastname)\n<assignment token>\nprint('Another way of writing... \\n' + full)\nprint(full.upper())\nprint(full.find('ip'))\nprint('Dipesh' in full)\nprint('Patel' in full)\nprint(full.replace('Rafaliya', 'Patel'))\nprint(bin(a))\nprint(hex(a))\n<assignment token>\nprint(x)\nprint(bin(x))\n<assignment token>\nprint(complex)\n<assignment token>\nprint(q)\n<assignment token>\nprint(w)\n<assignment token>\nprint(e)\n<assignment token>\nprint(r)\n<assignment token>\nprint(t)\n<assignment token>\nprint(g)\n<assignment token>\nprint(m)\n<assignment token>\nprint(abs(PI))\nprint(round(PI))\n<assignment token>\nprint(math.floor(no))\nprint(math.ceil(no))\n<assignment token>\nif age >= 21:\n print('Adult')\nelif age >= 13:\n print('Teenager')\nelse:\n print('Child')\nprint('Adult' if age >= 21 else 'Teenager')\nfor p in 'Dipesh':\n print(p)\nfor l in range(0, 10, 2):\n print(l)\n<assignment token>\nwhile answer != guess:\n guess = int(input('Enter your Guess:: '))\nelse:\n pass\n\n\ndef evenodd(numb):\n if numb % 2 == 0:\n return 'even'\n else:\n return 'odd'\n\n\nprint('The Number is ' + evenodd(20))\n\n\ndef rows(**ro):\n print(ro)\n\n\nrows(name='Dipesh', id=1)\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\ndef evenodd(numb):\n if numb % 2 == 0:\n return 'even'\n else:\n return 'odd'\n\n\n<code token>\n\n\ndef rows(**ro):\n print(ro)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n<code token>\n\n\ndef rows(**ro):\n print(ro)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n<code token>\n<function token>\n<code token>\n"
] | false |
816 |
21bdf315c98a4cf69482cc7db41bc30d44781596
|
"""added personal collection
Revision ID: 43eabda1d630
Revises: 9cad4dfb5125
Create Date: 2018-03-28 13:55:03.557872
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '43eabda1d630'
down_revision = '9cad4dfb5125'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('Gifs', sa.Column('personal_collections', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'Gifs', 'PersonalGifCollections', ['personal_collections'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'Gifs', type_='foreignkey')
op.drop_column('Gifs', 'personal_collections')
# ### end Alembic commands ###
|
[
"\"\"\"added personal collection\n\nRevision ID: 43eabda1d630\nRevises: 9cad4dfb5125\nCreate Date: 2018-03-28 13:55:03.557872\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '43eabda1d630'\ndown_revision = '9cad4dfb5125'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.add_column('Gifs', sa.Column('personal_collections', sa.Integer(), nullable=True))\n op.create_foreign_key(None, 'Gifs', 'PersonalGifCollections', ['personal_collections'], ['id'])\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_constraint(None, 'Gifs', type_='foreignkey')\n op.drop_column('Gifs', 'personal_collections')\n # ### end Alembic commands ###\n",
"<docstring token>\nfrom alembic import op\nimport sqlalchemy as sa\nrevision = '43eabda1d630'\ndown_revision = '9cad4dfb5125'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.add_column('Gifs', sa.Column('personal_collections', sa.Integer(),\n nullable=True))\n op.create_foreign_key(None, 'Gifs', 'PersonalGifCollections', [\n 'personal_collections'], ['id'])\n\n\ndef downgrade():\n op.drop_constraint(None, 'Gifs', type_='foreignkey')\n op.drop_column('Gifs', 'personal_collections')\n",
"<docstring token>\n<import token>\nrevision = '43eabda1d630'\ndown_revision = '9cad4dfb5125'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.add_column('Gifs', sa.Column('personal_collections', sa.Integer(),\n nullable=True))\n op.create_foreign_key(None, 'Gifs', 'PersonalGifCollections', [\n 'personal_collections'], ['id'])\n\n\ndef downgrade():\n op.drop_constraint(None, 'Gifs', type_='foreignkey')\n op.drop_column('Gifs', 'personal_collections')\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef upgrade():\n op.add_column('Gifs', sa.Column('personal_collections', sa.Integer(),\n nullable=True))\n op.create_foreign_key(None, 'Gifs', 'PersonalGifCollections', [\n 'personal_collections'], ['id'])\n\n\ndef downgrade():\n op.drop_constraint(None, 'Gifs', type_='foreignkey')\n op.drop_column('Gifs', 'personal_collections')\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef upgrade():\n op.add_column('Gifs', sa.Column('personal_collections', sa.Integer(),\n nullable=True))\n op.create_foreign_key(None, 'Gifs', 'PersonalGifCollections', [\n 'personal_collections'], ['id'])\n\n\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n"
] | false |
817 |
7503a0c8f83ff0ce370ed7bce733b09d9a2c69c4
|
# -*- coding: utf-8 -*-
from selenium.webdriver.common.keys import Keys
from titan.components import Base
class Input(Base):
def clear(self):
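        # Clear the input element: use a native clear() when the step's params
        # ask for it, otherwise click the field and send BACK_SPACE `space`
        # times (default 4) to delete its contents.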
element = self.driver.find_element_by_xpath(self.params['xpath'])
if self.params.get('clear', None):
element.clear()
return True
element.click()
        space_num = self.params['space'] if self.params.get('space', None) else 4
while space_num:
space_num -= 1
element.send_keys(Keys.BACK_SPACE)
def text(self):
print(self.params)
element = self.driver.find_element_by_xpath(self.params['xpath'])
element.send_keys(self.params['text'])
|
[
"# -*- coding: utf-8 -*-\nfrom selenium.webdriver.common.keys import Keys\nfrom titan.components import Base\n\n\nclass Input(Base):\n\n def clear(self):\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n if self.params.get('clear', None):\n element.clear()\n return True\n\n element.click()\n space_num = self.params['space']if self.params.get('space', None) else 4\n while space_num:\n space_num -= 1\n element.send_keys(Keys.BACK_SPACE)\n\n def text(self):\n print(self.params)\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n element.send_keys(self.params['text'])\n\n",
"from selenium.webdriver.common.keys import Keys\nfrom titan.components import Base\n\n\nclass Input(Base):\n\n def clear(self):\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n if self.params.get('clear', None):\n element.clear()\n return True\n element.click()\n space_num = self.params['space'] if self.params.get('space', None\n ) else 4\n while space_num:\n space_num -= 1\n element.send_keys(Keys.BACK_SPACE)\n\n def text(self):\n print(self.params)\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n element.send_keys(self.params['text'])\n",
"<import token>\n\n\nclass Input(Base):\n\n def clear(self):\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n if self.params.get('clear', None):\n element.clear()\n return True\n element.click()\n space_num = self.params['space'] if self.params.get('space', None\n ) else 4\n while space_num:\n space_num -= 1\n element.send_keys(Keys.BACK_SPACE)\n\n def text(self):\n print(self.params)\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n element.send_keys(self.params['text'])\n",
"<import token>\n\n\nclass Input(Base):\n <function token>\n\n def text(self):\n print(self.params)\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n element.send_keys(self.params['text'])\n",
"<import token>\n\n\nclass Input(Base):\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
818 |
b7738c27e11e9566d90157717633312031cdffd6
|
import sqlite3
from datetime import datetime
import numpy as np
import pandas as pd
import requests as r
from bs4 import BeautifulSoup
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
class announcement:
    def __init__(self, eps_df, revenue_df):
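        # Open the local SQLite cache and load the prior EPS, revenue and price
        # history for the ticker currently displayed by the Selenium driver.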
conn = sqlite3.connect("earnings.db", timeout=120)
cur = conn.cursor()
symbol_href = self.driver.find_element_by_class_name("lfkTWp")
symbol = symbol_href.text
eps_history_df = pd.read_sql(
'select * from estimize_eps where Symbol == "%s"' % symbol, conn
)
revenue_history_df = pd.read_sql("select * from estimize_revenue", conn)
price_history_df = pd.read_sql("select * from price_history", conn)
def get_combined_df(eps_df, revenue_df):
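        # Split 'Date Reported' into separate date and time parts, index both
        # frames by (Date Reported, Time Reported, Symbol), and join them into
        # one combined earnings DataFrame with prefixed column names.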
del eps_df["Historical Beat Rate"]
del revenue_df["Historical Beat Rate"]
date_reported_df = eps_df["Date Reported"].str.split(" ", n=1, expand=True)
date_reported_df = date_reported_df.rename(
columns={0: "Date Reported", 1: "Time Reported"}
)
date_reported_df["Date Reported"] = pd.to_datetime(
date_reported_df["Date Reported"]
)
eps_df["Date Reported"] = date_reported_df["Date Reported"]
eps_df["Time Reported"] = date_reported_df["Time Reported"]
date_reported_df = revenue_df["Date Reported"].str.split(" ", n=1, expand=True)
date_reported_df = date_reported_df.rename(
columns={0: "Date Reported", 1: "Time Reported"}
)
date_reported_df["Date Reported"] = pd.to_datetime(
date_reported_df["Date Reported"]
)
revenue_df["Date Reported"] = date_reported_df["Date Reported"]
revenue_df["Time Reported"] = date_reported_df["Time Reported"]
eps_df = eps_df.sort_values(by="Date Reported")
revenue_df = revenue_df.sort_values(by="Date Reported")
eps_df = eps_df.set_index(
["Date Reported", "Time Reported", "Symbol"], append=True, drop=True
)
revenue_df = revenue_df.set_index(
["Date Reported", "Time Reported", "Symbol"], append=True, drop=True
)
eps_df.columns = "EPS " + eps_df.columns
revenue_df.columns = "Revenue " + revenue_df.columns
df = eps_df.join(revenue_df)
return df
def get_historical_beat():
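        # For each row, look at that symbol's trailing eight reports (at least
        # four required) and record how often EPS beat estimates and the mean
        # surprise as a fraction of actual EPS.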
df["Historical EPS Beat Ratio"] = None
df["Historical EPS Beat Percent"] = None
for index, row in df.iterrows():
index_num, date_reported, time_reported, symbol = index
this_df = df[df.index.get_level_values("Symbol") == symbol]
beat_rate = this_df[
this_df.index.get_level_values("Date Reported") <= date_reported
].tail(8)
if len(beat_rate) >= 4:
beat_rate_ratio = len(beat_rate[beat_rate["EPS Surprise"] > 0]) / float(
len(beat_rate)
)
beat_rate_percent = beat_rate["EPS Surprise"] / beat_rate["EPS Actual"]
beat_rate_percent = beat_rate_percent.replace([np.inf, -np.inf], np.nan)
beat_rate_percent = beat_rate_percent.mean()
# TODO: Do the same for revenue
df.loc[index_num, ["Historical EPS Beat Ratio"]] = beat_rate_ratio
df.loc[index_num, ["Historical EPS Beat Percent"]] = beat_rate_percent
def get_average_change():
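        # Average the 5- and 10-day raw and abnormal price changes over up to
        # eight announcements reported before this one.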
df["Average Change 5 Days"] = None
df["Average Abnormal Change 5 Days"] = None
df["Average Change 10 Days"] = None
df["Average Abnormal Change 10 Days"] = None
for index, row in df.iterrows():
index_num, date_reported, time_reported, symbol = index
returns_df = df[
df.index.get_level_values("Date Reported") < date_reported
].tail(8)
if len(returns_df) >= 4:
df.loc[index_num, ["Average Change 5 Days"]] = returns_df[
"5 Day Change"
].mean()
df.loc[index_num, ["Average Change 10 Days"]] = returns_df[
"10 Day Change"
].mean()
df.loc[index_num, ["Average Abnormal Change 5 Days"]] = returns_df[
"5 Day Change Abnormal"
].mean()
df.loc[index_num, ["Average Abnormal Change 10 Days"]] = returns_df[
"10 Day Change Abnormal"
].mean()
def get_YoY_growth():
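        # Compare each quarter's actual EPS with the same fiscal quarter one
        # year earlier to get year-over-year growth.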
df["YoY Growth"] = None
for index, row in df.iterrows():
index_num, date_reported, time_reported, symbol = index
time_reported = time_reported.replace("'", "")
quarter_numer, year = time_reported.split(" ")
this_df = df["EPS Actual"]
try:
this_quarter = this_df[
this_df.index.get_level_values("Time Reported")
== quarter_numer + " '" + year
].values[0]
last_quarter = this_df[
this_df.index.get_level_values("Time Reported")
== quarter_numer + " '" + str(int(year) - 1)
].values[0]
df.loc[index_num, ["YoY Growth"]] = (
this_quarter - last_quarter
) / last_quarter
except Exception as e:
pass
def get_market_cap():
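        # Scrape the ticker's market cap from Finviz, convert the K/M/B suffix
        # to a number, and bucket it into a size category (Nano to Large).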
finviz_page = r.get("https://finviz.com/quote.ashx?t=%s" % symbol)
soup = BeautifulSoup(finviz_page.text, features="lxml")
table_row = soup.findAll("tr", attrs={"class": "table-dark-row"})[1]
market_cap = table_row.text.replace("Market Cap", "").split("\n")[1]
if "K" in market_cap:
market_cap = float(market_cap[:-1]) * 1000
elif "M" in market_cap:
market_cap = float(market_cap[:-1]) * 1000000
elif "B" in market_cap:
market_cap = float(market_cap[:-1]) * 1000000000
market_cap = int(market_cap)
if market_cap > 10000000000:
market_cap_text = "Large"
elif market_cap > 2000000000:
market_cap_text = "Medium"
elif market_cap > 300000000:
market_cap_text = "Small"
elif market_cap > 50000000:
market_cap_text = "Micro"
else:
market_cap_text = "Nano"
df["Market Cap Text"] = market_cap_text
def get_estimize_data(self):
# request the estimize website for data
url = "https://www.estimize.com/calendar?tab=equity&date=" + datetime.now().strftime(
"%Y-%m-%d"
)
self.driver.get(url)
# check if there are no companies reporting earnings
myElem = WebDriverWait(self.driver, self.delay).until(
EC.presence_of_element_located((By.CLASS_NAME, "dAViVi"))
)
companies_reporting_div = self.driver.find_element_by_class_name("dAViVi")
if "0 Events" == companies_reporting_div.text.split("\n")[1]:
return
# method to extra the ticker symbols from the webpage
tickers = self.get_tickers()
# method to get the historical data from yahoo
# self.get_yahoo_historical(tickers)
# TODO: update price history table with missing yahoo price data entries
# read the table and make a dataframe out of it
eps_df = pd.read_html(self.driver.page_source)[0]
eps_df["Symbol"] = tickers
eps_df = eps_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]
eps_df.columns = [
"Date Reported",
"Num of Estimates",
"Delta",
"Surprise",
"Historical Beat Rate",
"Wall St",
"Estimize",
"Actual",
"Symbol",
]
# same as above, but for revenues table instead of EPS table
url = (
"https://www.estimize.com/calendar?tab=equity&metric=revenue&date="
+ self.read_date.strftime("%Y-%m-%d")
)
self.driver.get(url)
myElem = WebDriverWait(self.driver, self.delay).until(
EC.presence_of_element_located((By.TAG_NAME, "table"))
)
revenue_df = pd.read_html(self.driver.page_source)[0]
tickers = self.get_tickers()
revenue_df["Symbol"] = tickers
revenue_df = revenue_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]
revenue_df.columns = [
"Date Reported",
"Num of Estimates",
"Delta",
"Surprise",
"Historical Beat Rate",
"Wall St",
"Estimize",
"Actual",
"Symbol",
]
return eps_df, revenue_df
def get_tickers(self):
# extract ticker symbopls from the html source
soup = BeautifulSoup(self.driver.page_source, features="lxml")
ticker_links = soup.findAll("a", attrs={"class": "lfkTWp"})
# create list of symbols that were extracted
tickers = []
for ticker in ticker_links:
tickers.append(ticker.contents[0])
return tickers
|
[
"import sqlite3\n\n\nclass announcement:\n def __init__(eps_df, revenue_df):\n conn = sqlite3.connect(\"earnings.db\", timeout=120)\n cur = conn.cursor()\n\n symbol_href = self.driver.find_element_by_class_name(\"lfkTWp\")\n symbol = symbol_href.text\n\n eps_history_df = pd.read_sql(\n 'select * from estimize_eps where Symbol == \"%s\"' % symbol, conn\n )\n revenue_history_df = pd.read_sql(\"select * from estimize_revenue\", conn)\n price_history_df = pd.read_sql(\"select * from price_history\", conn)\n\n def get_combined_df(eps_df, revenue_df):\n del eps_df[\"Historical Beat Rate\"]\n del revenue_df[\"Historical Beat Rate\"]\n\n date_reported_df = eps_df[\"Date Reported\"].str.split(\" \", n=1, expand=True)\n date_reported_df = date_reported_df.rename(\n columns={0: \"Date Reported\", 1: \"Time Reported\"}\n )\n date_reported_df[\"Date Reported\"] = pd.to_datetime(\n date_reported_df[\"Date Reported\"]\n )\n eps_df[\"Date Reported\"] = date_reported_df[\"Date Reported\"]\n eps_df[\"Time Reported\"] = date_reported_df[\"Time Reported\"]\n\n date_reported_df = revenue_df[\"Date Reported\"].str.split(\" \", n=1, expand=True)\n date_reported_df = date_reported_df.rename(\n columns={0: \"Date Reported\", 1: \"Time Reported\"}\n )\n date_reported_df[\"Date Reported\"] = pd.to_datetime(\n date_reported_df[\"Date Reported\"]\n )\n revenue_df[\"Date Reported\"] = date_reported_df[\"Date Reported\"]\n revenue_df[\"Time Reported\"] = date_reported_df[\"Time Reported\"]\n\n eps_df = eps_df.sort_values(by=\"Date Reported\")\n revenue_df = revenue_df.sort_values(by=\"Date Reported\")\n\n eps_df = eps_df.set_index(\n [\"Date Reported\", \"Time Reported\", \"Symbol\"], append=True, drop=True\n )\n revenue_df = revenue_df.set_index(\n [\"Date Reported\", \"Time Reported\", \"Symbol\"], append=True, drop=True\n )\n\n eps_df.columns = \"EPS \" + eps_df.columns\n revenue_df.columns = \"Revenue \" + revenue_df.columns\n\n df = eps_df.join(revenue_df)\n\n return df\n\n def get_historical_beat():\n df[\"Historical EPS Beat Ratio\"] = None\n df[\"Historical EPS Beat Percent\"] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n\n this_df = df[df.index.get_level_values(\"Symbol\") == symbol]\n beat_rate = this_df[\n this_df.index.get_level_values(\"Date Reported\") <= date_reported\n ].tail(8)\n\n if len(beat_rate) >= 4:\n beat_rate_ratio = len(beat_rate[beat_rate[\"EPS Surprise\"] > 0]) / float(\n len(beat_rate)\n )\n beat_rate_percent = beat_rate[\"EPS Surprise\"] / beat_rate[\"EPS Actual\"]\n beat_rate_percent = beat_rate_percent.replace([np.inf, -np.inf], np.nan)\n beat_rate_percent = beat_rate_percent.mean()\n\n # TODO: Do the same for revenue\n df.loc[index_num, [\"Historical EPS Beat Ratio\"]] = beat_rate_ratio\n df.loc[index_num, [\"Historical EPS Beat Percent\"]] = beat_rate_percent\n\n def get_average_change():\n df[\"Average Change 5 Days\"] = None\n df[\"Average Abnormal Change 5 Days\"] = None\n df[\"Average Change 10 Days\"] = None\n df[\"Average Abnormal Change 10 Days\"] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n\n returns_df = df[\n df.index.get_level_values(\"Date Reported\") < date_reported\n ].tail(8)\n\n if len(returns_df) >= 4:\n df.loc[index_num, [\"Average Change 5 Days\"]] = returns_df[\n \"5 Day Change\"\n ].mean()\n df.loc[index_num, [\"Average Change 10 Days\"]] = returns_df[\n \"10 Day Change\"\n ].mean()\n df.loc[index_num, [\"Average Abnormal Change 5 Days\"]] = returns_df[\n 
\"5 Day Change Abnormal\"\n ].mean()\n df.loc[index_num, [\"Average Abnormal Change 10 Days\"]] = returns_df[\n \"10 Day Change Abnormal\"\n ].mean()\n\n def get_YoY_growth():\n df[\"YoY Growth\"] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", \"\")\n quarter_numer, year = time_reported.split(\" \")\n\n this_df = df[\"EPS Actual\"]\n try:\n this_quarter = this_df[\n this_df.index.get_level_values(\"Time Reported\")\n == quarter_numer + \" '\" + year\n ].values[0]\n last_quarter = this_df[\n this_df.index.get_level_values(\"Time Reported\")\n == quarter_numer + \" '\" + str(int(year) - 1)\n ].values[0]\n df.loc[index_num, [\"YoY Growth\"]] = (\n this_quarter - last_quarter\n ) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get(\"https://finviz.com/quote.ashx?t=%s\" % symbol)\n\n soup = BeautifulSoup(finviz_page.text, features=\"lxml\")\n table_row = soup.findAll(\"tr\", attrs={\"class\": \"table-dark-row\"})[1]\n market_cap = table_row.text.replace(\"Market Cap\", \"\").split(\"\\n\")[1]\n if \"K\" in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif \"M\" in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif \"B\" in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = \"Large\"\n elif market_cap > 2000000000:\n market_cap_text = \"Medium\"\n elif market_cap > 300000000:\n market_cap_text = \"Small\"\n elif market_cap > 50000000:\n market_cap_text = \"Micro\"\n else:\n market_cap_text = \"Nano\"\n\n df[\"Market Cap Text\"] = market_cap_text\n\n\ndef get_estimize_data(self):\n # request the estimize website for data\n url = \"https://www.estimize.com/calendar?tab=equity&date=\" + datetime.now().strftime(\n \"%Y-%m-%d\"\n )\n self.driver.get(url)\n\n # check if there are no companies reporting earnings\n myElem = WebDriverWait(self.driver, self.delay).until(\n EC.presence_of_element_located((By.CLASS_NAME, \"dAViVi\"))\n )\n companies_reporting_div = self.driver.find_element_by_class_name(\"dAViVi\")\n if \"0 Events\" == companies_reporting_div.text.split(\"\\n\")[1]:\n return\n\n # method to extra the ticker symbols from the webpage\n tickers = self.get_tickers()\n\n # method to get the historical data from yahoo\n # self.get_yahoo_historical(tickers)\n # TODO: update price history table with missing yahoo price data entries\n\n # read the table and make a dataframe out of it\n eps_df = pd.read_html(self.driver.page_source)[0]\n eps_df[\"Symbol\"] = tickers\n eps_df = eps_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]\n eps_df.columns = [\n \"Date Reported\",\n \"Num of Estimates\",\n \"Delta\",\n \"Surprise\",\n \"Historical Beat Rate\",\n \"Wall St\",\n \"Estimize\",\n \"Actual\",\n \"Symbol\",\n ]\n\n # same as above, but for revenues table instead of EPS table\n url = (\n \"https://www.estimize.com/calendar?tab=equity&metric=revenue&date=\"\n + self.read_date.strftime(\"%Y-%m-%d\")\n )\n self.driver.get(url)\n myElem = WebDriverWait(self.driver, self.delay).until(\n EC.presence_of_element_located((By.TAG_NAME, \"table\"))\n )\n\n revenue_df = pd.read_html(self.driver.page_source)[0]\n tickers = self.get_tickers()\n revenue_df[\"Symbol\"] = tickers\n revenue_df = revenue_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]\n revenue_df.columns = [\n \"Date Reported\",\n \"Num of Estimates\",\n \"Delta\",\n \"Surprise\",\n \"Historical Beat 
Rate\",\n \"Wall St\",\n \"Estimize\",\n \"Actual\",\n \"Symbol\",\n ]\n\n return eps_df, revenue_df\n\n\ndef get_tickers(self):\n # extract ticker symbopls from the html source\n soup = BeautifulSoup(self.driver.page_source, features=\"lxml\")\n ticker_links = soup.findAll(\"a\", attrs={\"class\": \"lfkTWp\"})\n\n # create list of symbols that were extracted\n tickers = []\n for ticker in ticker_links:\n tickers.append(ticker.contents[0])\n\n return tickers\n",
"import sqlite3\n\n\nclass announcement:\n\n def __init__(eps_df, revenue_df):\n conn = sqlite3.connect('earnings.db', timeout=120)\n cur = conn.cursor()\n symbol_href = self.driver.find_element_by_class_name('lfkTWp')\n symbol = symbol_href.text\n eps_history_df = pd.read_sql(\n 'select * from estimize_eps where Symbol == \"%s\"' % symbol, conn)\n revenue_history_df = pd.read_sql('select * from estimize_revenue', conn\n )\n price_history_df = pd.read_sql('select * from price_history', conn)\n\n def get_combined_df(eps_df, revenue_df):\n del eps_df['Historical Beat Rate']\n del revenue_df['Historical Beat Rate']\n date_reported_df = eps_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n eps_df['Date Reported'] = date_reported_df['Date Reported']\n eps_df['Time Reported'] = date_reported_df['Time Reported']\n date_reported_df = revenue_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n revenue_df['Date Reported'] = date_reported_df['Date Reported']\n revenue_df['Time Reported'] = date_reported_df['Time Reported']\n eps_df = eps_df.sort_values(by='Date Reported')\n revenue_df = revenue_df.sort_values(by='Date Reported')\n eps_df = eps_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n revenue_df = revenue_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n eps_df.columns = 'EPS ' + eps_df.columns\n revenue_df.columns = 'Revenue ' + revenue_df.columns\n df = eps_df.join(revenue_df)\n return df\n\n def get_historical_beat():\n df['Historical EPS Beat Ratio'] = None\n df['Historical EPS Beat Percent'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n this_df = df[df.index.get_level_values('Symbol') == symbol]\n beat_rate = this_df[this_df.index.get_level_values(\n 'Date Reported') <= date_reported].tail(8)\n if len(beat_rate) >= 4:\n beat_rate_ratio = len(beat_rate[beat_rate['EPS Surprise'] > 0]\n ) / float(len(beat_rate))\n beat_rate_percent = beat_rate['EPS Surprise'] / beat_rate[\n 'EPS Actual']\n beat_rate_percent = beat_rate_percent.replace([np.inf, -np.\n inf], np.nan)\n beat_rate_percent = beat_rate_percent.mean()\n df.loc[index_num, ['Historical EPS Beat Ratio']\n ] = beat_rate_ratio\n df.loc[index_num, ['Historical EPS Beat Percent']\n ] = beat_rate_percent\n\n def get_average_change():\n df['Average Change 5 Days'] = None\n df['Average Abnormal Change 5 Days'] = None\n df['Average Change 10 Days'] = None\n df['Average Abnormal Change 10 Days'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n returns_df = df[df.index.get_level_values('Date Reported') <\n date_reported].tail(8)\n if len(returns_df) >= 4:\n df.loc[index_num, ['Average Change 5 Days']] = returns_df[\n '5 Day Change'].mean()\n df.loc[index_num, ['Average Change 10 Days']] = returns_df[\n '10 Day Change'].mean()\n df.loc[index_num, ['Average Abnormal Change 5 Days']\n ] = returns_df['5 Day Change Abnormal'].mean()\n df.loc[index_num, ['Average Abnormal Change 10 Days']\n ] = returns_df['10 Day Change Abnormal'].mean()\n\n def get_YoY_growth():\n df['YoY Growth'] 
= None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\ndef get_estimize_data(self):\n url = 'https://www.estimize.com/calendar?tab=equity&date=' + datetime.now(\n ).strftime('%Y-%m-%d')\n self.driver.get(url)\n myElem = WebDriverWait(self.driver, self.delay).until(EC.\n presence_of_element_located((By.CLASS_NAME, 'dAViVi')))\n companies_reporting_div = self.driver.find_element_by_class_name('dAViVi')\n if '0 Events' == companies_reporting_div.text.split('\\n')[1]:\n return\n tickers = self.get_tickers()\n eps_df = pd.read_html(self.driver.page_source)[0]\n eps_df['Symbol'] = tickers\n eps_df = eps_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]\n eps_df.columns = ['Date Reported', 'Num of Estimates', 'Delta',\n 'Surprise', 'Historical Beat Rate', 'Wall St', 'Estimize', 'Actual',\n 'Symbol']\n url = (\n 'https://www.estimize.com/calendar?tab=equity&metric=revenue&date=' +\n self.read_date.strftime('%Y-%m-%d'))\n self.driver.get(url)\n myElem = WebDriverWait(self.driver, self.delay).until(EC.\n presence_of_element_located((By.TAG_NAME, 'table')))\n revenue_df = pd.read_html(self.driver.page_source)[0]\n tickers = self.get_tickers()\n revenue_df['Symbol'] = tickers\n revenue_df = revenue_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]\n revenue_df.columns = ['Date Reported', 'Num of Estimates', 'Delta',\n 'Surprise', 'Historical Beat Rate', 'Wall St', 'Estimize', 'Actual',\n 'Symbol']\n return eps_df, revenue_df\n\n\ndef get_tickers(self):\n soup = BeautifulSoup(self.driver.page_source, features='lxml')\n ticker_links = soup.findAll('a', attrs={'class': 'lfkTWp'})\n tickers = []\n for ticker in ticker_links:\n tickers.append(ticker.contents[0])\n return tickers\n",
"<import token>\n\n\nclass announcement:\n\n def __init__(eps_df, revenue_df):\n conn = sqlite3.connect('earnings.db', timeout=120)\n cur = conn.cursor()\n symbol_href = self.driver.find_element_by_class_name('lfkTWp')\n symbol = symbol_href.text\n eps_history_df = pd.read_sql(\n 'select * from estimize_eps where Symbol == \"%s\"' % symbol, conn)\n revenue_history_df = pd.read_sql('select * from estimize_revenue', conn\n )\n price_history_df = pd.read_sql('select * from price_history', conn)\n\n def get_combined_df(eps_df, revenue_df):\n del eps_df['Historical Beat Rate']\n del revenue_df['Historical Beat Rate']\n date_reported_df = eps_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n eps_df['Date Reported'] = date_reported_df['Date Reported']\n eps_df['Time Reported'] = date_reported_df['Time Reported']\n date_reported_df = revenue_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n revenue_df['Date Reported'] = date_reported_df['Date Reported']\n revenue_df['Time Reported'] = date_reported_df['Time Reported']\n eps_df = eps_df.sort_values(by='Date Reported')\n revenue_df = revenue_df.sort_values(by='Date Reported')\n eps_df = eps_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n revenue_df = revenue_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n eps_df.columns = 'EPS ' + eps_df.columns\n revenue_df.columns = 'Revenue ' + revenue_df.columns\n df = eps_df.join(revenue_df)\n return df\n\n def get_historical_beat():\n df['Historical EPS Beat Ratio'] = None\n df['Historical EPS Beat Percent'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n this_df = df[df.index.get_level_values('Symbol') == symbol]\n beat_rate = this_df[this_df.index.get_level_values(\n 'Date Reported') <= date_reported].tail(8)\n if len(beat_rate) >= 4:\n beat_rate_ratio = len(beat_rate[beat_rate['EPS Surprise'] > 0]\n ) / float(len(beat_rate))\n beat_rate_percent = beat_rate['EPS Surprise'] / beat_rate[\n 'EPS Actual']\n beat_rate_percent = beat_rate_percent.replace([np.inf, -np.\n inf], np.nan)\n beat_rate_percent = beat_rate_percent.mean()\n df.loc[index_num, ['Historical EPS Beat Ratio']\n ] = beat_rate_ratio\n df.loc[index_num, ['Historical EPS Beat Percent']\n ] = beat_rate_percent\n\n def get_average_change():\n df['Average Change 5 Days'] = None\n df['Average Abnormal Change 5 Days'] = None\n df['Average Change 10 Days'] = None\n df['Average Abnormal Change 10 Days'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n returns_df = df[df.index.get_level_values('Date Reported') <\n date_reported].tail(8)\n if len(returns_df) >= 4:\n df.loc[index_num, ['Average Change 5 Days']] = returns_df[\n '5 Day Change'].mean()\n df.loc[index_num, ['Average Change 10 Days']] = returns_df[\n '10 Day Change'].mean()\n df.loc[index_num, ['Average Abnormal Change 5 Days']\n ] = returns_df['5 Day Change Abnormal'].mean()\n df.loc[index_num, ['Average Abnormal Change 10 Days']\n ] = returns_df['10 Day Change Abnormal'].mean()\n\n def get_YoY_growth():\n df['YoY Growth'] 
= None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\ndef get_estimize_data(self):\n url = 'https://www.estimize.com/calendar?tab=equity&date=' + datetime.now(\n ).strftime('%Y-%m-%d')\n self.driver.get(url)\n myElem = WebDriverWait(self.driver, self.delay).until(EC.\n presence_of_element_located((By.CLASS_NAME, 'dAViVi')))\n companies_reporting_div = self.driver.find_element_by_class_name('dAViVi')\n if '0 Events' == companies_reporting_div.text.split('\\n')[1]:\n return\n tickers = self.get_tickers()\n eps_df = pd.read_html(self.driver.page_source)[0]\n eps_df['Symbol'] = tickers\n eps_df = eps_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]\n eps_df.columns = ['Date Reported', 'Num of Estimates', 'Delta',\n 'Surprise', 'Historical Beat Rate', 'Wall St', 'Estimize', 'Actual',\n 'Symbol']\n url = (\n 'https://www.estimize.com/calendar?tab=equity&metric=revenue&date=' +\n self.read_date.strftime('%Y-%m-%d'))\n self.driver.get(url)\n myElem = WebDriverWait(self.driver, self.delay).until(EC.\n presence_of_element_located((By.TAG_NAME, 'table')))\n revenue_df = pd.read_html(self.driver.page_source)[0]\n tickers = self.get_tickers()\n revenue_df['Symbol'] = tickers\n revenue_df = revenue_df.iloc[:, [2, 3, 5, 6, 7, 8, 9, 10, 12]]\n revenue_df.columns = ['Date Reported', 'Num of Estimates', 'Delta',\n 'Surprise', 'Historical Beat Rate', 'Wall St', 'Estimize', 'Actual',\n 'Symbol']\n return eps_df, revenue_df\n\n\ndef get_tickers(self):\n soup = BeautifulSoup(self.driver.page_source, features='lxml')\n ticker_links = soup.findAll('a', attrs={'class': 'lfkTWp'})\n tickers = []\n for ticker in ticker_links:\n tickers.append(ticker.contents[0])\n return tickers\n",
"<import token>\n\n\nclass announcement:\n\n def __init__(eps_df, revenue_df):\n conn = sqlite3.connect('earnings.db', timeout=120)\n cur = conn.cursor()\n symbol_href = self.driver.find_element_by_class_name('lfkTWp')\n symbol = symbol_href.text\n eps_history_df = pd.read_sql(\n 'select * from estimize_eps where Symbol == \"%s\"' % symbol, conn)\n revenue_history_df = pd.read_sql('select * from estimize_revenue', conn\n )\n price_history_df = pd.read_sql('select * from price_history', conn)\n\n def get_combined_df(eps_df, revenue_df):\n del eps_df['Historical Beat Rate']\n del revenue_df['Historical Beat Rate']\n date_reported_df = eps_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n eps_df['Date Reported'] = date_reported_df['Date Reported']\n eps_df['Time Reported'] = date_reported_df['Time Reported']\n date_reported_df = revenue_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n revenue_df['Date Reported'] = date_reported_df['Date Reported']\n revenue_df['Time Reported'] = date_reported_df['Time Reported']\n eps_df = eps_df.sort_values(by='Date Reported')\n revenue_df = revenue_df.sort_values(by='Date Reported')\n eps_df = eps_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n revenue_df = revenue_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n eps_df.columns = 'EPS ' + eps_df.columns\n revenue_df.columns = 'Revenue ' + revenue_df.columns\n df = eps_df.join(revenue_df)\n return df\n\n def get_historical_beat():\n df['Historical EPS Beat Ratio'] = None\n df['Historical EPS Beat Percent'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n this_df = df[df.index.get_level_values('Symbol') == symbol]\n beat_rate = this_df[this_df.index.get_level_values(\n 'Date Reported') <= date_reported].tail(8)\n if len(beat_rate) >= 4:\n beat_rate_ratio = len(beat_rate[beat_rate['EPS Surprise'] > 0]\n ) / float(len(beat_rate))\n beat_rate_percent = beat_rate['EPS Surprise'] / beat_rate[\n 'EPS Actual']\n beat_rate_percent = beat_rate_percent.replace([np.inf, -np.\n inf], np.nan)\n beat_rate_percent = beat_rate_percent.mean()\n df.loc[index_num, ['Historical EPS Beat Ratio']\n ] = beat_rate_ratio\n df.loc[index_num, ['Historical EPS Beat Percent']\n ] = beat_rate_percent\n\n def get_average_change():\n df['Average Change 5 Days'] = None\n df['Average Abnormal Change 5 Days'] = None\n df['Average Change 10 Days'] = None\n df['Average Abnormal Change 10 Days'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n returns_df = df[df.index.get_level_values('Date Reported') <\n date_reported].tail(8)\n if len(returns_df) >= 4:\n df.loc[index_num, ['Average Change 5 Days']] = returns_df[\n '5 Day Change'].mean()\n df.loc[index_num, ['Average Change 10 Days']] = returns_df[\n '10 Day Change'].mean()\n df.loc[index_num, ['Average Abnormal Change 5 Days']\n ] = returns_df['5 Day Change Abnormal'].mean()\n df.loc[index_num, ['Average Abnormal Change 10 Days']\n ] = returns_df['10 Day Change Abnormal'].mean()\n\n def get_YoY_growth():\n df['YoY Growth'] 
= None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\n<function token>\n\n\ndef get_tickers(self):\n soup = BeautifulSoup(self.driver.page_source, features='lxml')\n ticker_links = soup.findAll('a', attrs={'class': 'lfkTWp'})\n tickers = []\n for ticker in ticker_links:\n tickers.append(ticker.contents[0])\n return tickers\n",
"<import token>\n\n\nclass announcement:\n\n def __init__(eps_df, revenue_df):\n conn = sqlite3.connect('earnings.db', timeout=120)\n cur = conn.cursor()\n symbol_href = self.driver.find_element_by_class_name('lfkTWp')\n symbol = symbol_href.text\n eps_history_df = pd.read_sql(\n 'select * from estimize_eps where Symbol == \"%s\"' % symbol, conn)\n revenue_history_df = pd.read_sql('select * from estimize_revenue', conn\n )\n price_history_df = pd.read_sql('select * from price_history', conn)\n\n def get_combined_df(eps_df, revenue_df):\n del eps_df['Historical Beat Rate']\n del revenue_df['Historical Beat Rate']\n date_reported_df = eps_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n eps_df['Date Reported'] = date_reported_df['Date Reported']\n eps_df['Time Reported'] = date_reported_df['Time Reported']\n date_reported_df = revenue_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n revenue_df['Date Reported'] = date_reported_df['Date Reported']\n revenue_df['Time Reported'] = date_reported_df['Time Reported']\n eps_df = eps_df.sort_values(by='Date Reported')\n revenue_df = revenue_df.sort_values(by='Date Reported')\n eps_df = eps_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n revenue_df = revenue_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n eps_df.columns = 'EPS ' + eps_df.columns\n revenue_df.columns = 'Revenue ' + revenue_df.columns\n df = eps_df.join(revenue_df)\n return df\n\n def get_historical_beat():\n df['Historical EPS Beat Ratio'] = None\n df['Historical EPS Beat Percent'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n this_df = df[df.index.get_level_values('Symbol') == symbol]\n beat_rate = this_df[this_df.index.get_level_values(\n 'Date Reported') <= date_reported].tail(8)\n if len(beat_rate) >= 4:\n beat_rate_ratio = len(beat_rate[beat_rate['EPS Surprise'] > 0]\n ) / float(len(beat_rate))\n beat_rate_percent = beat_rate['EPS Surprise'] / beat_rate[\n 'EPS Actual']\n beat_rate_percent = beat_rate_percent.replace([np.inf, -np.\n inf], np.nan)\n beat_rate_percent = beat_rate_percent.mean()\n df.loc[index_num, ['Historical EPS Beat Ratio']\n ] = beat_rate_ratio\n df.loc[index_num, ['Historical EPS Beat Percent']\n ] = beat_rate_percent\n\n def get_average_change():\n df['Average Change 5 Days'] = None\n df['Average Abnormal Change 5 Days'] = None\n df['Average Change 10 Days'] = None\n df['Average Abnormal Change 10 Days'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n returns_df = df[df.index.get_level_values('Date Reported') <\n date_reported].tail(8)\n if len(returns_df) >= 4:\n df.loc[index_num, ['Average Change 5 Days']] = returns_df[\n '5 Day Change'].mean()\n df.loc[index_num, ['Average Change 10 Days']] = returns_df[\n '10 Day Change'].mean()\n df.loc[index_num, ['Average Abnormal Change 5 Days']\n ] = returns_df['5 Day Change Abnormal'].mean()\n df.loc[index_num, ['Average Abnormal Change 10 Days']\n ] = returns_df['10 Day Change Abnormal'].mean()\n\n def get_YoY_growth():\n df['YoY Growth'] 
= None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\nclass announcement:\n\n def __init__(eps_df, revenue_df):\n conn = sqlite3.connect('earnings.db', timeout=120)\n cur = conn.cursor()\n symbol_href = self.driver.find_element_by_class_name('lfkTWp')\n symbol = symbol_href.text\n eps_history_df = pd.read_sql(\n 'select * from estimize_eps where Symbol == \"%s\"' % symbol, conn)\n revenue_history_df = pd.read_sql('select * from estimize_revenue', conn\n )\n price_history_df = pd.read_sql('select * from price_history', conn)\n\n def get_combined_df(eps_df, revenue_df):\n del eps_df['Historical Beat Rate']\n del revenue_df['Historical Beat Rate']\n date_reported_df = eps_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n eps_df['Date Reported'] = date_reported_df['Date Reported']\n eps_df['Time Reported'] = date_reported_df['Time Reported']\n date_reported_df = revenue_df['Date Reported'].str.split(' ', n=1,\n expand=True)\n date_reported_df = date_reported_df.rename(columns={(0):\n 'Date Reported', (1): 'Time Reported'})\n date_reported_df['Date Reported'] = pd.to_datetime(date_reported_df\n ['Date Reported'])\n revenue_df['Date Reported'] = date_reported_df['Date Reported']\n revenue_df['Time Reported'] = date_reported_df['Time Reported']\n eps_df = eps_df.sort_values(by='Date Reported')\n revenue_df = revenue_df.sort_values(by='Date Reported')\n eps_df = eps_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n revenue_df = revenue_df.set_index(['Date Reported', 'Time Reported',\n 'Symbol'], append=True, drop=True)\n eps_df.columns = 'EPS ' + eps_df.columns\n revenue_df.columns = 'Revenue ' + revenue_df.columns\n df = eps_df.join(revenue_df)\n return df\n <function token>\n\n def get_average_change():\n df['Average Change 5 Days'] = None\n df['Average Abnormal Change 5 Days'] = None\n df['Average Change 10 Days'] = None\n df['Average Abnormal Change 10 Days'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n returns_df = df[df.index.get_level_values('Date Reported') <\n date_reported].tail(8)\n if len(returns_df) >= 4:\n df.loc[index_num, ['Average Change 5 Days']] = returns_df[\n '5 Day Change'].mean()\n df.loc[index_num, ['Average Change 10 Days']] = returns_df[\n '10 Day Change'].mean()\n df.loc[index_num, ['Average Abnormal Change 5 Days']\n ] = returns_df['5 Day Change Abnormal'].mean()\n df.loc[index_num, ['Average Abnormal Change 10 Days']\n ] = returns_df['10 Day Change Abnormal'].mean()\n\n def get_YoY_growth():\n df['YoY Growth'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 
'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\nclass announcement:\n\n def __init__(eps_df, revenue_df):\n conn = sqlite3.connect('earnings.db', timeout=120)\n cur = conn.cursor()\n symbol_href = self.driver.find_element_by_class_name('lfkTWp')\n symbol = symbol_href.text\n eps_history_df = pd.read_sql(\n 'select * from estimize_eps where Symbol == \"%s\"' % symbol, conn)\n revenue_history_df = pd.read_sql('select * from estimize_revenue', conn\n )\n price_history_df = pd.read_sql('select * from price_history', conn)\n <function token>\n <function token>\n\n def get_average_change():\n df['Average Change 5 Days'] = None\n df['Average Abnormal Change 5 Days'] = None\n df['Average Change 10 Days'] = None\n df['Average Abnormal Change 10 Days'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n returns_df = df[df.index.get_level_values('Date Reported') <\n date_reported].tail(8)\n if len(returns_df) >= 4:\n df.loc[index_num, ['Average Change 5 Days']] = returns_df[\n '5 Day Change'].mean()\n df.loc[index_num, ['Average Change 10 Days']] = returns_df[\n '10 Day Change'].mean()\n df.loc[index_num, ['Average Abnormal Change 5 Days']\n ] = returns_df['5 Day Change Abnormal'].mean()\n df.loc[index_num, ['Average Abnormal Change 10 Days']\n ] = returns_df['10 Day Change Abnormal'].mean()\n\n def get_YoY_growth():\n df['YoY Growth'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\nclass announcement:\n <function token>\n <function token>\n <function token>\n\n def get_average_change():\n df['Average Change 5 Days'] = None\n df['Average Abnormal Change 5 Days'] = None\n df['Average Change 10 Days'] = None\n df['Average Abnormal Change 10 Days'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n returns_df = df[df.index.get_level_values('Date Reported') <\n date_reported].tail(8)\n if len(returns_df) >= 4:\n df.loc[index_num, ['Average Change 5 Days']] = returns_df[\n '5 Day Change'].mean()\n df.loc[index_num, ['Average Change 10 Days']] = returns_df[\n '10 Day Change'].mean()\n df.loc[index_num, ['Average Abnormal Change 5 Days']\n ] = returns_df['5 Day Change Abnormal'].mean()\n df.loc[index_num, ['Average Abnormal Change 10 Days']\n ] = returns_df['10 Day Change Abnormal'].mean()\n\n def get_YoY_growth():\n df['YoY Growth'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\nclass announcement:\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_YoY_growth():\n df['YoY Growth'] = None\n for index, row in df.iterrows():\n index_num, date_reported, time_reported, symbol = index\n time_reported = time_reported.replace(\"'\", '')\n quarter_numer, year = time_reported.split(' ')\n this_df = df['EPS Actual']\n try:\n this_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + year].values[0]\n last_quarter = this_df[this_df.index.get_level_values(\n 'Time Reported') == quarter_numer + \" '\" + str(int(year\n ) - 1)].values[0]\n df.loc[index_num, ['YoY Growth']] = (this_quarter -\n last_quarter) / last_quarter\n except Exception as e:\n pass\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\nclass announcement:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_market_cap():\n finviz_page = r.get('https://finviz.com/quote.ashx?t=%s' % symbol)\n soup = BeautifulSoup(finviz_page.text, features='lxml')\n table_row = soup.findAll('tr', attrs={'class': 'table-dark-row'})[1]\n market_cap = table_row.text.replace('Market Cap', '').split('\\n')[1]\n if 'K' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000\n elif 'M' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000\n elif 'B' in market_cap:\n market_cap = float(market_cap[:-1]) * 1000000000\n market_cap = int(market_cap)\n if market_cap > 10000000000:\n market_cap_text = 'Large'\n elif market_cap > 2000000000:\n market_cap_text = 'Medium'\n elif market_cap > 300000000:\n market_cap_text = 'Small'\n elif market_cap > 50000000:\n market_cap_text = 'Micro'\n else:\n market_cap_text = 'Nano'\n df['Market Cap Text'] = market_cap_text\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\nclass announcement:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n",
"<import token>\n<class token>\n<function token>\n<function token>\n"
] | false |
819 |
8ccec24e1a7060269ffbb376ba0c480da9eabe0a
|
import tensorflow as tf
import settings
import numpy as np
slim = tf.contrib.slim
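# YOLO v1-style detector: builds the TF1 graph for either the full or the
# "fast" backbone and, in training mode, adds the multi-part detection loss
# and a gradient-descent optimizer.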
class Model:
def __init__(self, training = True):
self.classes = settings.classes_name
self.num_classes = len(settings.classes_name)
self.image_size = settings.image_size
self.cell_size = settings.cell_size
self.boxes_per_cell = settings.box_per_cell
self.output_size = (self.cell_size * self.cell_size) * (self.num_classes + self.boxes_per_cell * 5)
self.scale = 1.0 * self.image_size / self.cell_size
self.boundary1 = self.cell_size * self.cell_size * self.num_classes
self.boundary2 = self.boundary1 + self.cell_size * self.cell_size * self.boxes_per_cell
self.object_scale = settings.object_scale
self.no_object_scale = settings.no_object_scale
self.class_scale = settings.class_scale
self.coord_scale = settings.coordinate_scale
self.offset = np.transpose(np.reshape(np.array([np.arange(self.cell_size)] * self.cell_size * self.boxes_per_cell), (self.boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))
self.images = tf.placeholder(tf.float32, [None, settings.image_size, settings.image_size, 3])
if settings.model_type == 'normal':
self.logits = self.build_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)
if settings.model_type == 'fast':
self.logits = self.build_fast_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)
if training:
self.batch = tf.Variable(0)
self.labels = tf.placeholder(tf.float32, [None, self.cell_size, self.cell_size, 5 + self.num_classes])
self.loss_layer(self.logits, self.labels)
self.total_loss = tf.contrib.losses.get_total_loss()
            self.learning_rate = tf.train.exponential_decay(settings.learning_rate, self.batch * settings.batch_size, settings.decay_step, settings.decay_rate, staircase=True)
self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate).minimize(self.total_loss, global_step = self.batch)
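
    # Full backbone: a deep stack of convolution/max-pool layers followed by
    # two fully connected layers that emit the flat
    # cell_size * cell_size * (num_classes + boxes_per_cell * 5) output vector.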
def build_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):
with tf.variable_scope(scope):
with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):
net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')
net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')
net = slim.conv2d(net, 192, 3, scope = 'conv_4')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')
net = slim.conv2d(net, 128, 1, scope = 'conv_6')
net = slim.conv2d(net, 256, 3, scope = 'conv_7')
net = slim.conv2d(net, 256, 1, scope = 'conv_8')
net = slim.conv2d(net, 512, 3, scope = 'conv_9')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')
net = slim.conv2d(net, 256, 1, scope = 'conv_11')
net = slim.conv2d(net, 512, 3, scope = 'conv_12')
net = slim.conv2d(net, 256, 1, scope = 'conv_13')
net = slim.conv2d(net, 512, 3, scope = 'conv_14')
net = slim.conv2d(net, 256, 1, scope = 'conv_15')
net = slim.conv2d(net, 512, 3, scope = 'conv_16')
net = slim.conv2d(net, 256, 1, scope = 'conv_17')
net = slim.conv2d(net, 512, 3, scope = 'conv_18')
net = slim.conv2d(net, 512, 1, scope = 'conv_19')
net = slim.conv2d(net, 1024, 3, scope = 'conv_20')
net = slim.max_pool2d(net, 2, padding='SAME', scope = 'pool_21')
net = slim.conv2d(net, 512, 1, scope = 'conv_22')
net = slim.conv2d(net, 1024, 3, scope = 'conv_23')
net = slim.conv2d(net, 512, 1, scope = 'conv_24')
net = slim.conv2d(net, 1024, 3, scope = 'conv_25')
net = slim.conv2d(net, 1024, 3, scope = 'conv_26')
net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')
net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')
net = slim.conv2d(net, 1024, 3, scope = 'conv_29')
net = slim.conv2d(net, 1024, 3, scope = 'conv_30')
net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')
net = slim.flatten(net, scope = 'flat_32')
net = slim.fully_connected(net, 512, scope = 'fc_33')
net = slim.fully_connected(net, 4096, scope = 'fc_34')
net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')
net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')
return net
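
    # Reduced backbone with fewer convolutional layers; same input padding
    # scheme and output size as build_network, used when settings.model_type
    # is 'fast'.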
def build_fast_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):
with tf.variable_scope(scope):
with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):
net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')
net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')
net = slim.conv2d(net, 192, 3, scope = 'conv_4')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')
net = slim.conv2d(net, 128, 1, scope = 'conv_6')
net = slim.conv2d(net, 256, 3, scope = 'conv_7')
net = slim.conv2d(net, 512, 3, scope = 'conv_9')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')
net = slim.conv2d(net, 256, 1, scope = 'conv_11')
net = slim.conv2d(net, 512, 3, scope = 'conv_12')
net = slim.conv2d(net, 1024, 3, scope = 'conv_20')
net = slim.max_pool2d(net, 2, padding='SAME', scope = 'pool_21')
net = slim.conv2d(net, 512, 1, scope = 'conv_22')
net = slim.conv2d(net, 1024, 3, scope = 'conv_23')
net = slim.conv2d(net, 1024, 3, scope = 'conv_26')
net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')
net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')
net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')
net = slim.flatten(net, scope = 'flat_32')
net = slim.fully_connected(net, 512, scope = 'fc_33')
net = slim.fully_connected(net, 4096, scope = 'fc_34')
net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')
net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')
return net
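
    # Converts centre-format boxes (x, y, w, h) to corner format and returns
    # the element-wise IoU of boxes1 and boxes2, clipped to [0, 1].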
def calc_iou(self, boxes1, boxes2, scope = 'iou'):
with tf.variable_scope(scope):
boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2] / 2.0,
boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / 2.0,
boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0,
boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])
boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])
boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2] / 2.0,
boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / 2.0,
boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0,
boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])
boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])
lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])
rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])
intersection = tf.maximum(0.0, rd - lu)
inter_square = intersection[:, :, :, :, 0] * intersection[:, :, :, :, 1]
square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1[:, :, :, :, 3] - boxes1[:, :, :, :, 1])
square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2[:, :, :, :, 3] - boxes2[:, :, :, :, 1])
union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)
return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)
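
    # YOLO loss: splits the flat prediction into class / confidence / box
    # parts, picks the responsible box per cell via best IoU, and registers
    # the four weighted squared-error terms (class, object, no-object,
    # coordinate) with tf.contrib.losses.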
def loss_layer(self, predicts, labels, scope = 'loss_layer'):
with tf.variable_scope(scope):
predict_classes = tf.reshape(predicts[:, :self.boundary1], [settings.batch_size, self.cell_size, self.cell_size, self.num_classes])
predict_scales = tf.reshape(predicts[:, self.boundary1:self.boundary2], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell])
predict_boxes = tf.reshape(predicts[:, self.boundary2:], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell, 4])
response = tf.reshape(labels[:, :, :, 0], [settings.batch_size, self.cell_size, self.cell_size, 1])
boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size, self.cell_size, self.cell_size, 1, 4])
boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]) / self.image_size
classes = labels[:, :, :, 5:]
offset = tf.constant(self.offset, dtype = tf.float32)
offset = tf.reshape(offset, [1, self.cell_size, self.cell_size, self.boxes_per_cell])
offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])
predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] + offset) / self.cell_size,
(predict_boxes[:, :, :, :, 1] + tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size,
tf.square(predict_boxes[:, :, :, :, 2]),
tf.square(predict_boxes[:, :, :, :, 3])])
predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3, 4, 0])
iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)
object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)
object_mask = tf.cast((iou_predict_truth >= object_mask), tf.float32) * response
noobject_mask = tf.ones_like(object_mask, dtype=tf.float32) - object_mask
boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size - offset,
boxes[:, :, :, :, 1] * self.cell_size - tf.transpose(offset, (0, 2, 1, 3)),
tf.sqrt(boxes[:, :, :, :, 2]),
tf.sqrt(boxes[:, :, :, :, 3])])
boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])
class_delta = response * (predict_classes - classes)
class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta), axis=[1, 2, 3]), name = 'class_loss') * self.class_scale
object_delta = object_mask * (predict_scales - iou_predict_truth)
object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(object_delta), axis=[1, 2, 3]), name = 'object_loss') * self.object_scale
noobject_delta = noobject_mask * predict_scales
noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(noobject_delta), axis=[1, 2, 3]), name = 'noobject_loss') * self.no_object_scale
coord_mask = tf.expand_dims(object_mask, 4)
boxes_delta = coord_mask * (predict_boxes - boxes_tran)
coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta), axis=[1, 2, 3, 4]), name = 'coord_loss') * self.coord_scale
tf.contrib.losses.add_loss(class_loss)
tf.contrib.losses.add_loss(object_loss)
tf.contrib.losses.add_loss(noobject_loss)
tf.contrib.losses.add_loss(coord_loss)
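
# Activation factory referenced via slim.arg_scope above; it is only called
# when the graph is built, so defining it after the class is safe.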
def leaky_relu(alpha):
def op(inputs):
return tf.maximum(alpha * inputs, inputs)
return op
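
if __name__ == '__main__':
    # Hedged usage sketch (an editorial addition, not part of the original
    # file): instantiates the model and runs one optimisation step on random
    # inputs, assuming the same `settings` module referenced above supplies
    # batch_size, image_size, cell_size and classes_name, and that a TF1-style
    # Session API is available.
    model = Model(training=True)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        images = np.random.rand(settings.batch_size, settings.image_size,
                                settings.image_size, 3).astype(np.float32)
        labels = np.zeros((settings.batch_size, settings.cell_size,
                           settings.cell_size, 5 + len(settings.classes_name)),
                          dtype=np.float32)
        loss, _ = sess.run([model.total_loss, model.optimizer],
                           feed_dict={model.images: images, model.labels: labels})
        print('total loss after one step:', loss)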
|
[
"import tensorflow as tf\nimport settings\nimport numpy as np\n\nslim = tf.contrib.slim\n\nclass Model:\n \n def __init__(self, training = True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = (self.cell_size * self.cell_size) * (self.num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = self.boundary1 + self.cell_size * self.cell_size * self.boxes_per_cell\n\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n \n self.offset = np.transpose(np.reshape(np.array([np.arange(self.cell_size)] * self.cell_size * self.boxes_per_cell), (self.boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n\n self.images = tf.placeholder(tf.float32, [None, settings.image_size, settings.image_size, 3])\n \n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)\n \n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size, self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.learning_rate, self.batch * settings.batch_size, settings.decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate).minimize(self.total_loss, global_step = self.batch)\n \n def build_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')\n net = slim.conv2d(net, 192, 3, scope = 'conv_4')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')\n net = slim.conv2d(net, 128, 1, scope = 'conv_6')\n net = slim.conv2d(net, 256, 3, scope = 'conv_7')\n net = slim.conv2d(net, 256, 1, scope = 'conv_8')\n net = slim.conv2d(net, 512, 3, scope = 'conv_9')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')\n net = slim.conv2d(net, 256, 1, scope = 'conv_11')\n net = slim.conv2d(net, 512, 3, scope = 'conv_12')\n net = slim.conv2d(net, 256, 1, scope = 'conv_13')\n net = slim.conv2d(net, 512, 3, scope = 'conv_14')\n net = slim.conv2d(net, 256, 1, scope = 'conv_15')\n net = slim.conv2d(net, 512, 3, scope = 'conv_16')\n net = slim.conv2d(net, 256, 1, scope = 'conv_17')\n net = slim.conv2d(net, 512, 3, scope = 'conv_18')\n net = slim.conv2d(net, 512, 1, scope = 'conv_19')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_20')\n net = slim.max_pool2d(net, 2, 
padding='SAME', scope = 'pool_21')\n net = slim.conv2d(net, 512, 1, scope = 'conv_22')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_23')\n net = slim.conv2d(net, 512, 1, scope = 'conv_24')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_25')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_29')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope = 'flat_32')\n net = slim.fully_connected(net, 512, scope = 'fc_33')\n net = slim.fully_connected(net, 4096, scope = 'fc_34')\n net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')\n return net\n \n def build_fast_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')\n net = slim.conv2d(net, 192, 3, scope = 'conv_4')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')\n net = slim.conv2d(net, 128, 1, scope = 'conv_6')\n net = slim.conv2d(net, 256, 3, scope = 'conv_7')\n net = slim.conv2d(net, 512, 3, scope = 'conv_9')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')\n net = slim.conv2d(net, 256, 1, scope = 'conv_11')\n net = slim.conv2d(net, 512, 3, scope = 'conv_12')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope = 'pool_21')\n net = slim.conv2d(net, 512, 1, scope = 'conv_22')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_23')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope = 'flat_32')\n net = slim.fully_connected(net, 512, scope = 'fc_33')\n net = slim.fully_connected(net, 4096, scope = 'fc_34')\n net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')\n return net\n \n \n def calc_iou(self, boxes1, boxes2, scope = 'iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2] / 2.0,\n boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / 2.0,\n boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0,\n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2] / 2.0,\n boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / 2.0,\n boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0,\n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n\n lu = tf.maximum(boxes1[:, :, :, 
:, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :, :, :, 1]\n\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1[:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2[:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope = 'loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [settings.batch_size, self.cell_size, self.cell_size, self.num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.boundary2], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell, 4])\n\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size, self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size, self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]) / self.image_size\n classes = labels[:, :, :, 5:]\n\n offset = tf.constant(self.offset, dtype = tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size, self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] + offset) / self.cell_size,\n (predict_boxes[:, :, :, :, 1] + tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size,\n tf.square(predict_boxes[:, :, :, :, 2]),\n tf.square(predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3, 4, 0])\n\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast((iou_predict_truth >= object_mask), tf.float32) * response\n\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32) - object_mask\n\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size - offset,\n boxes[:, :, :, :, 1] * self.cell_size - tf.transpose(offset, (0, 2, 1, 3)),\n tf.sqrt(boxes[:, :, :, :, 2]),\n tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta), axis=[1, 2, 3]), name = 'class_loss') * self.class_scale\n\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(object_delta), axis=[1, 2, 3]), name = 'object_loss') * self.object_scale\n\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(noobject_delta), axis=[1, 2, 3]), name = 'noobject_loss') * self.no_object_scale\n\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta), axis=[1, 2, 3, 4]), name = 'coord_loss') * self.coord_scale\n\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\ndef leaky_relu(alpha):\n \n def 
op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"import tensorflow as tf\nimport settings\nimport numpy as np\nslim = tf.contrib.slim\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = 
slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection 
= tf.maximum(0.0, rd - lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\ndef leaky_relu(alpha):\n\n def op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"<import token>\nslim = tf.contrib.slim\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net 
= slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = 
intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\ndef leaky_relu(alpha):\n\n def op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"<import token>\n<assignment token>\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = 
slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = 
intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\ndef leaky_relu(alpha):\n\n def op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"<import token>\n<assignment token>\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = 
slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = 
intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass Model:\n <function token>\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, 
scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask 
= tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass Model:\n <function token>\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, 
scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n <function token>\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass Model:\n <function token>\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n <function token>\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 
1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n <function token>\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass Model:\n <function token>\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass Model:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n"
] | false |
820 |
920cd41b18f5cfb45f46c44ed707cebe682d4dd9
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: [email protected]
'''
@author: clarkmatthew
extension of the boto instance class, with added convenience methods + objects
Add common instance test routines to this class
Examples:
from eucaops import Eucaops
from nephoria.windows_instance import WinInstance
tester = Eucaops(credpath='eucarc-10.111.5.80-eucalyptus-sys_admin')
wins = WinInstance.make_euinstance_from_instance(tester.get_instances(idstring='i-89E13DA8')[0], tester=tester, keypair='test')
vol = tester.get_volume(status='available', zone=wins.placement)
wins.attach_volume(vol)
'''
import socket
import os
import re
import time
import copy
import types
import operator
from prettytable import PrettyTable, ALL
from boto.ec2.instance import Instance
from nephoria.aws.ec2.euvolume import EuVolume
from cloud_utils.log_utils import eulogger, get_line, markup
from nephoria.euca.taggedresource import TaggedResource
from boto.ec2.instance import InstanceState
from datetime import datetime
from cloud_utils.net_utils import winrm_connection
termline = get_line()
class WinInstanceDiskType():
gigabyte = 1073741824
megabyte = 1048576
def __init__(self, win_instance, wmic_dict):
self.check_dict_requires(wmic_dict)
self.__dict__ = self.convert_numbers_in_dict(copy.copy(wmic_dict))
self.win_instance = win_instance
self.size_in_gb = self.get_size_in_gb()
self.size_in_mb = self.get_size_in_mb()
self.size = long(self.size or 0)
self.last_updated = time.time()
self.setup()
def setup(self):
raise Exception('Not Implemented')
def check_dict_requires(self, wmic_dict):
raise Exception('Not Implemented')
    def convert_numbers_in_dict(self, dict):
        # Convert strings that represent whole numbers into long integers
        for key in dict:
            value = str(dict[key])
            if re.search(r"\S", value) and not re.search(r"\D", value):
                dict[key] = long(dict[key])
        return dict
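    # Illustrative sketch (added comment, not part of the original source): given a wmic-style
    # dict such as {'size': '42949672960', 'index': '0', 'caption': 'SCSI Disk Device'},
    # the loop above converts the purely numeric strings and leaves the rest alone, yielding
    # {'size': 42949672960L, 'index': 0L, 'caption': 'SCSI Disk Device'}.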
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
    def get_size_in_gb(self):
        '''
        Attempts to convert self.size from bytes to gigabytes, rounding up when the remainder
        is greater than .99 of a gigabyte to account for differences in how the size is represented
        '''
        self.size = int(self.size or 0)
        gigs = self.size / self.gigabyte
        if (self.size % self.gigabyte) / float(self.gigabyte) > .99:
            gigs += 1
        return gigs
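    # Worked example (added for clarity, not in the original source): a 10GiB volume reports
    # size = 10737418240 bytes, so 10737418240 / 1073741824 = 10 with no remainder and 10 is
    # returned. A disk reporting 10733676416 bytes yields 9 whole GiB plus a remainder of
    # roughly 0.997 GiB, which the > .99 check rounds up to 10.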
    def get_size_in_mb(self):
        '''
        Attempts to convert self.size from bytes to megabytes, rounding up when the remainder
        is greater than .99 of a megabyte to account for differences in how the size is represented
        '''
        self.size = int(self.size or 0)
        mb = self.size / self.megabyte
        if (self.size % self.megabyte) / float(self.megabyte) > .99:
            mb += 1
        return mb
def print_self(self):
self.get_summary(printmethod=self.win_instance.debug)
def get_summary(self, printheader=True, printmethod=None):
raise Exception('Method not implemented')
def print_self_full(self, printmethod=None):
'''
formats and prints self.dict
'''
self.win_instance.print_dict(dict=self.__dict__, printmethod=printmethod)
class WinInstanceDiskDrive(WinInstanceDiskType):
def setup(self):
if not hasattr(self, 'serialnumber'):
self.serialnumber = ''
if not hasattr(self, 'caption'):
self.caption = ''
if hasattr(self, 'model'):
self.caption = self.model
else:
self.model = self.caption
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.update_ebs_info()
self.disk_partitions = []
    def check_dict_requires(self, wmic_dict):
        if not ('deviceid' in wmic_dict and
                'size' in wmic_dict and
                ('caption' in wmic_dict or 'model' in wmic_dict) and
                'index' in wmic_dict):
            raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, caption (or model), and index')
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
def update_md5_info_from_ebs(self):
self.md5 = None
self.md5len = None
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
if not vol.md5:
vol.md5len = 1024
vol.md5 = self.win_instance.get_dev_md5(self.cygwin_scsi_drive, vol.md5len)
self.md5 = vol.md5
self.md5len = vol.md5len
break
def update_ebs_info_from_serial_number(self):
'''
        Attempts to parse the serial number field from an EBS volume and find the corresponding EBS volume
example format: vol-81C13EA4-dev-sdg
'''
if re.match("^vol-", self.serialnumber):
split = self.serialnumber.split('-')
self.ebs_volume = str(split[0]) + "-" + str(split[1])
self.ebs_cloud_dev = "/" + str(split[2]) + "/" + str(split[3])
else:
self.ebs_volume = ''
self.ebs_cloud_dev = ''
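    # Illustrative walk-through (added comment, not in the original source): for the example
    # serial number 'vol-81C13EA4-dev-sdg' from the docstring above, split('-') returns
    # ['vol', '81C13EA4', 'dev', 'sdg'], so ebs_volume becomes 'vol-81C13EA4' and
    # ebs_cloud_dev becomes '/dev/sdg'. Serial numbers that do not start with 'vol-' clear both fields.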
def update_ebs_info(self):
self.update_ebs_info_from_serial_number()
if not self.ebs_volume:
if self.index == 0 and self.win_instance.root_device_type == 'ebs':
bdm = self.win_instance.block_device_mapping[self.win_instance.root_device_name]
self.ebs_volume = bdm.volume_id
else:
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
self.ebs_volume = vol.id
break
if not self.ebs_cloud_dev and self.ebs_volume:
volume = self.win_instance.tester.get_volume(volume_id=self.ebs_volume)
if hasattr(volume,'attach_data') and volume.attach_data:
self.ebs_cloud_dev = volume.attach_data.device
self.update_md5_info_from_ebs()
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 20
size = 16
sizegb = 7
ebsvol = 12
serialnumber = 24
caption = 36
part_count = 6
logical_ids = 8
cygdrive = 10
md5 = 32
header = "DISKDRIVE DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"EBS VOL".center(ebsvol) + "|" + \
"CAPTION".center(caption) + "|" + \
"PARTS".center(part_count) + "|" + \
"LOGICAL".center(logical_ids) + "|" + \
"CYGDRIVE".center(cygdrive) + "|" + \
"SERIAL NUMBER".center(serialnumber) + "|" + \
"MD5 CHECK SUM".center(md5) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.ebs_volume).center(ebsvol) + "|" + \
str(self.caption).center(caption) + "|" + \
str(self.partitions).center(part_count) + "|" + \
str(",".join(str(x) for x in self.get_logicaldisk_ids())).center(logical_ids) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|" + \
str(self.serialnumber).center(serialnumber) + "|" + \
str(self.md5).center(md5) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceDiskPartition(WinInstanceDiskType):
def setup(self):
#self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(drive_id=self.deviceid)
self.logicaldisks = []
#Set values in case 'brief' was used when fetching partitions
if not hasattr(self,'deviceid'):
self.deviceid = self.name
if not hasattr(self,'bootable'):
self.bootable = self.bootpartition
if not hasattr(self,'diskindex'):
self.diskindex = self.get_disk_index_from_name()
def check_dict_requires(self, wmic_dict):
if not ('name' in wmic_dict and
'size' in wmic_dict and
'bootpartition' in wmic_dict and
'index' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; name, size, bootpartition, and index')
def get_disk_index_from_name(self):
diskindex = None
diskindexstring = self.name.split(',')[0]
if re.search('disk', diskindexstring, re.IGNORECASE):
diskindex = int(diskindexstring.split('#')[1])
return diskindex
def get_logicaldisk_ids(self):
retlist = []
for disk in self.logicaldisks:
retlist.append(disk.deviceid)
return retlist
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
sizegb = 12
sizemb = 12
bootable = 10
header = "PARTITION DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"SIZE MB".center(sizemb) + "|" + \
"BOOTABLE".center(bootable) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.size_in_mb).center(sizemb) + "|" + \
str(self.bootable).center(bootable) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceLogicalDisk(WinInstanceDiskType):
def setup(self):
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.partition = None
def check_dict_requires(self, wmic_dict):
if not ('deviceid' in wmic_dict and
'size' in wmic_dict and
'description' in wmic_dict and
'freespace' in wmic_dict and
'filesystem' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, description, freespace, and filesystem')
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
freespace = 16
filesystem = 24
description = 30
cygdrive = 10
header = "LOGICAL DEV ID".center(deviceid) + "|" + \
"SIZE".center(size) + "|" + \
"FREE SPACE".center(freespace) + "|" + \
"FILE SYSTEM".center(filesystem) + "|" + \
"DESCRIPTION".center(description) + "|" + \
"CYGDRIVE".center(cygdrive) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.freespace).center(freespace) + "|" + \
str(self.filesystem).center(filesystem) + "|" + \
str(self.description).center(description) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstance(Instance, TaggedResource):
gigabyte = 1073741824
megabyte = 1048576
@classmethod
def make_euinstance_from_instance(cls,
instance,
tester,
debugmethod = None,
keypair=None,
keypath=None,
password=None,
username="Administrator",
auto_connect = True,
verbose=True,
timeout=120,
private_addressing = False,
reservation = None,
cmdstart=None,
try_non_root_exec=True,
winrm_port='5985',
winrm_protocol='http',
rdp_port='3389',
rootfs_device = "sda",
block_device_prefix = "sd",
bdm_root_vol = None,
virtio_blk = True,
cygwin_path = None,
disk_update_interval=10,
retry=2,
brief=False
):
'''
Primary constructor for this class. Note: to avoid an ssh session within this method, provide keys, username/pass later.
Arguments:
instance - mandatory- a Boto instance object used to build this euinstance object
keypair - optional- a boto keypair object used for creating ssh connection to the instance
username - optional- string used to create ssh connection as an alternative to keypair
password - optional- string used to create ssh connection to this instance as an alternative to keypair
exec_password -optional -string used for su or sudo where prompted for password, will default to 'password'
auto_connect -optional -boolean, if True will attempt to automatically create an ssh session for this instance
try_non_root_exec -optional -boolean, if True will attempt to use sudo if available else su -c to execute privileged commands
timeout - optional- integer used for ssh connection timeout
debugmethod - optional - method, used for debug output
verbose - optional - boolean to determine if debug is to be printed using debug()
retry - optional - integer, ssh connection attempts for non-authentication failures
'''
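# Illustrative usage only (a hedged sketch, not taken from this module's tests): assuming
# 'instance' is a boto Instance object and 'tester' is the eucaops/nephoria connection used
# elsewhere in this class, a wrapper could be built without connecting immediately:
#   win_ins = WinInstance.make_euinstance_from_instance(instance, tester, auto_connect=False)
#   win_ins.connect_to_instance(timeout=300)   # connect later once keys/password are available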
newins = WinInstance(instance.connection)
newins.__dict__ = instance.__dict__
newins.tester = tester
newins.winrm_port = winrm_port
newins.rdp_port = rdp_port
newins.bdm_root_vol = None
newins.winrm_protocol = winrm_protocol
newins.debugmethod = debugmethod
if newins.debugmethod is None:
newins.log = eulogger.Eulogger(identifier= str(instance.id))
newins.debugmethod= newins.log.debug
if (keypair is not None):
if isinstance(keypair,types.StringTypes):
keyname = keypair
keypair = tester.get_keypair(keyname)
else:
keyname = keypair.name
newins.keypath = keypath or os.getcwd() + "/" + keyname + ".pem"
newins.keypair = keypair
newins.password = password
newins.username = username
newins.verbose = verbose
newins.attached_vols=[]
newins.timeout = timeout
newins.virtio_blk = virtio_blk
newins.disk_update_interval = disk_update_interval
newins.retry = retry
newins.brief = brief
newins.rootfs_device = rootfs_device
newins.block_device_prefix = block_device_prefix
newins.private_addressing = private_addressing
newins.reservation = reservation or newins.get_reservation()
if newins.reservation:
newins.security_groups = newins.tester.get_instance_security_groups(newins)
else:
newins.security_groups = None
newins.laststate = newins.state
newins.cmdstart = cmdstart
newins.auto_connect = auto_connect
newins.set_last_status()
newins.update_vm_type_info()
newins.cygwin_path = cygwin_path
newins.system_info = None
newins.diskdrives = []
newins.disk_partitions = []
newins.logicaldisks = []
newins.cygwin_dev_map = {}
#newins.set_block_device_prefix()
if newins.root_device_type == 'ebs':
try:
volume = newins.tester.get_volume(volume_id = newins.block_device_mapping.get(newins.root_device_name).volume_id)
newins.bdm_root_vol = EuVolume.make_euvol_from_vol(volume, tester=newins.tester,cmdstart=newins.cmdstart)
except:pass
newins.winrm = None
if newins.auto_connect and newins.state == 'running':
newins.connect_to_instance(timeout=timeout)
return newins
@property
def age(self):
launchtime = self.tester.get_datetime_from_resource_string(self.launch_time)
# return the elapsed time in seconds
return (time.mktime(datetime.utcnow().utctimetuple()) -
time.mktime(launchtime.utctimetuple()))
def update(self, validate=False, dry_run=False,
err_state='terminated', err_code=-1):
ret = None
tb = ""
retries = 2
for x in xrange(0, retries):
try:
#send with validation True, fail later...
ret = super(WinInstance, self).update(validate=True,
dry_run=dry_run)
break
except ValueError:
if validate:
raise
tb = self.tester.get_traceback()
self.debug('Failed to update instance. Attempt:{0}/{1}'
.format(x, retries))
if not ret:
failmsg = 'Failed to update instance. Instance may no longer ' \
'be present on system "{0}"'.format(self.id)
self.debug('{0}\n{1}'.format(tb, failmsg))
self.debug('{0} setting fake state to:"{1}"'.format(self.id,
err_state))
state = InstanceState(name=err_state, code=err_code)
self._state = state
ret = self.state
self.set_last_status()
return ret
def update_vm_type_info(self):
self.vmtype_info = self.tester.get_vm_type_from_zone(self.placement,self.instance_type)
return self.vmtype_info
def set_last_status(self,status=None):
self.laststate = self.state
self.laststatetime = time.time()
self.age_at_state = self.tester.get_instance_time_launched(self)
#Also record age from user's perspective, ie when they issued the run instance request (if this is available)
if self.cmdstart:
self.age_from_run_cmd = "{0:.2f}".format(time.time() - self.cmdstart)
else:
self.age_from_run_cmd = None
def print_dict(self, dict=None, printmethod=None):
'''
formats and prints
'''
printmethod = printmethod or self.debug
buf = "\n"
dict = dict or self.__dict__
longest_key = 0
for key in dict:
if len(key) > longest_key:
longest_key = len(key)
for key in dict:
buf += str(key).ljust(longest_key) + " -----> :" + str(dict[key]) + "\n"
printmethod(buf)
def printself(self, title=True, footer=True, printmethod=None, printme=True):
def state_markup(state):
# Markup instance state...
if state == 'running':
return markup(state, markups=[1, 92])
if state == 'terminated':
return markup(state, markups=[1, 97])
if state == 'shutting-down':
return markup(state, markups=[1, 95])
if state == 'pending':
return markup(state, markups=[1, 93])
if state == 'stopped':
return markup(state, markups=[1, 91])
else:
return markup(state, markups=[1, 91])
def multi_line(lines):
# Utility method for creating multi line table entries...
buf = ""
maxlen = 0
for line in lines:
if len(line) + 2 > maxlen:
maxlen = len(line) + 2
for line in lines:
buf += str(line).ljust(maxlen) + "\n"
buf = buf.rstrip()
return (buf, maxlen)
bdmvol = self.root_device_type
if self.bdm_root_vol:
bdmvol += ":" + self.bdm_root_vol.id
reservation_id = None
if self.reservation:
reservation_id = self.reservation.id
owner_id = self.reservation.owner_id
else:
owner_id = "???"
# Create a multi line field for instance's run info
idlist = [markup("{0} {1}".format('ID:', self.id), markups=[1, 4, 94]),
"{0} {1}".format(markup('TYPE:'), self.instance_type),
"{0} {1}".format(markup('RES:'), reservation_id),
"{0}".format(markup("ACCOUNT ID:")), owner_id]
id_string, idlen = multi_line(idlist)
try:
emi = self.tester.get_emi(self.image_id)
emi_name = str(emi.name[0:18]) + ".."
except:
emi_name = ""
# Create a multi line field for the instance's image info
virt_type = 'PV'
if self.virtualization_type == 'hvm':
virt_type = 'HVM'
emi_string, emilen = multi_line(
[markup("{0} {1}".format('EMI:', self.image_id)),
"{0} {1}".format(markup('OS:'), self.platform or 'linux'),
"{0} {1}".format(markup('VIRT:'), virt_type),
"{0}".format(markup('IMAGE NAME:')),
emi_name])
# Create a multi line field for the instance's state info
if self.age:
age = int(self.age)
state_string, state_len = multi_line(["STATE: " + state_markup(self.laststate),
"{0} {1}".format(markup('AGE:'), age),
"{0} {1}".format(markup("ZONE:"), self.placement),
markup('ROOTDEV:'), bdmvol])
# Create the primary table called pt...
netinfo = 'INSTANCE NETWORK INFO:'
idheader = 'INSTANCE ID'
imageheader = 'INSTANCE IMAGE'
stateheader = 'INSTANCE STATE'
pt = PrettyTable([idheader, imageheader, stateheader, netinfo])
pt.align[netinfo] = 'l'
pt.valign[netinfo] = 'm'
pt.align[idheader] = 'l'
pt.align[imageheader] = 'l'
pt.align[stateheader] = 'l'
pt.max_width[idheader] = idlen
pt.max_width[imageheader] = emilen
pt.max_width[stateheader] = state_len
pt.padding_width = 0
pt.hrules = ALL
# PrettyTable headers do not work with ascii markups, so make a pseudo header
new_header = []
for field in pt._field_names:
new_header.append(markup(field, markups=[1, 4]))
pt.add_row(new_header)
pt.header = False
# Create a subtable 'netpt' to summarize and format the networking portion...
# Set the maxwidth of each column so the tables line up when showing multiple instances
vpc_col = ('VPC', 4)
subnet_col = ('SUBNET', 6)
if self.vpc_id:
vpc_col = ('VPC', 12)
subnet_col = ('SUBNET', 15)
secgrp_col = ('SEC GRPS', 11)
privaddr_col = ('P', 1)
privip_col = ('PRIV IP', 15)
pubip_col = ('PUB IP', 15)
net_cols = [vpc_col, subnet_col, secgrp_col, privaddr_col, privip_col, pubip_col]
# Get the Max width of the main tables network summary column...
# Start with 2 to account for beginning and end column borders
netinfo_width = 2
netinfo_header = []
for col in net_cols:
netinfo_width += col[1] + 1
netinfo_header.append(col[0])
pt.max_width[netinfo] = netinfo_width
netpt = PrettyTable([vpc_col[0], subnet_col[0], secgrp_col[0], privaddr_col[0],
privip_col[0], pubip_col[0]])
netpt.padding_width = 0
netpt.vrules = ALL
for col in net_cols:
netpt.max_width[col[0]] = col[1]
sec_grps = []
for grp in self.groups:
sec_grps.append(str(grp.id))
sec_grps = ",".join(sec_grps)
private_addressing = "N"
if self.private_addressing:
private_addressing = "Y"
netpt.add_row([str(self.vpc_id).center(vpc_col[1]),
str(self.subnet_id).center(subnet_col[1]),
str(sec_grps).center(secgrp_col[1]),
str(private_addressing).center(privaddr_col[1]),
str(self.private_ip_address).center(privip_col[1]),
str(self.ip_address).center(pubip_col[1])])
# To squeeze a potentially long keyname under the network summary table, get the length
# and format this column to allow for wrapping a keyname under the table...
# netbuf = netpt.get_string()
netbuf = "{0}:{1} {2}:{3}\n".format(markup("NODE"),
self.tags.get('euca:node', "???").ljust(16),
markup("KEYPAIR"), self.key_name)
netbuf += "\n".join(netpt.get_string().splitlines()[0:-1])
# Create the row in the main table...
pt.add_row([id_string, emi_string, state_string, netbuf])
if printme:
printmethod = printmethod or self.log.debug
printmethod("\n" + str(pt) + "\n")
return pt
def get_password(self,
private_key_path=None,
key=None,
dir=None,
exten=".pem",
encoded=True,
force_update=False):
'''
:param private_key_path: private key file used to decrypt password
:param key: name of private key
:param dir: Path to private key
:param exten: extension of private key
:param encoded: boolean of whether string returned from server is
Base64 encoded
:return: decrypted password
'''
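# Illustrative call (the key path is a hypothetical example): fetch and cache the decrypted
# Administrator password using the .pem that launched the instance.
#   pw = win_ins.get_password(private_key_path='./mykey.pem', force_update=True)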
if self.password is None or force_update:
self.password = self.tester.get_windows_instance_password(
self,
private_key_path=private_key_path,
key=key,
dir=dir,
exten=exten,
encoded=encoded)
return self.password
def reset_ssh_connection(self, timeout=None):
# todo: Remove ssh reference from this method, use something like
# reset_instance_connection, etc..
self.debug('Note ssh not implemented at this time, using winrm for '
'shell access instead...')
return self.reset_winrm_connection(timeout=timeout)
def reset_winrm_connection(self, timeout=None, force=False):
# todo:
timeout = timeout or self.timeout
self.debug('reset_winrm_connection for:'+str(self.id))
self.get_password(force_update=True)
if self.username is None or self.password is None:
#Allow but warn here as this may be a valid negative test
self.debug('Warning username and/or password were None in '
'winrm connection?')
# Create a new winrm interface if this is a new instance or
# an attribute has changed...
try:
#Check the port in order to provide debug if the connection fails
self.test_port_status(port=self.winrm_port, ip=self.ip_address)
except:pass
if force or not (self.winrm and \
self.winrm.hostname == self.ip_address and \
self.winrm.username == self.username and \
self.winrm.password == self.password):
if self.winrm:
self.winrm.close_shell()
self.winrm = winrm_connection.Winrm_Connection(
hostname = self.ip_address,
username = self.username,
password = self.password,
port = self.winrm_port,
protocol = self.winrm_protocol,
debug_method = self.debug,
verbose=True
)
def get_reservation(self):
res = None
try:
res = self.tester.get_reservation_for_instance(self)
except Exception, e:
self.update()
self.debug('Could not get reservation for instance in state:' +
str(self.state) + ", err:" + str(e))
return res
def connect_to_instance(self, wait_for_boot=180, timeout=120):
'''
Attempts to establish a management (winrm) connection to the instance.
:params wait_for_boot: time to wait, allowing guest to boot before
attempting to poll for ports active status
:params timeout: -optional - time in seconds to wait when polling
port(s) status(s) before failure
'''
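# Rough usage sketch (the values are examples, not requirements): allow 3 minutes for the
# guest to boot, then up to 2 more minutes of port polling / winrm connection attempts.
#   win_ins.connect_to_instance(wait_for_boot=180, timeout=120)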
self.debug("{0}connect_to_instance starting.\nwait_for_boot:{1} "
"seconds\ntimeout from boot:{2}{3}"
.format(termline, wait_for_boot, timeout, termline))
try:
self.poll_for_port_status_with_boot_delay(waitforboot=wait_for_boot,
timeout=timeout)
except Exception, e:
self.debug('Warning failed to poll port status:' + str(e))
self.debug("Attempting to create connection to instance:" + self.id)
attempts = 0
start = time.time()
elapsed = 0
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
while (elapsed < timeout):
attempts += 1
try:
self.update()
self.reset_winrm_connection()
self.debug('Try some sys...')
self.sys("whoami")
except Exception, se:
tb = self.tester.get_traceback()
self.debug('Caught exception attempting to connect '
'winrm shell:\n'+ str(tb) + str(se))
elapsed = int(time.time()-start)
self.debug('connect_to_instance: Attempts:' + str(attempts) +
', elapsed:'+str(elapsed)+'/'+str(timeout))
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
time.sleep(5)
pass
else:
break
elapsed = int(time.time()-start)
if self.winrm is None:
self.get_connection_debug()
raise RuntimeError(str(self.id) +
":Failed establishing management connection to "
"instance, elapsed:" + str(elapsed) +
"/" + str(timeout))
self.debug('Connect_to_instance updating attached volumes/disk '
'info for vols: ' + str(self.attached_vols))
if self.brief:
self.update_system_info()
else:
self.update_system_and_disk_info()
self.init_attached_volumes()
self.debug("{0}connect_to_instance completed{1}"
.format(termline, termline))
def get_connection_debug(self):
# Add network debug/diag info here...
# First show arp cache from local machine
# todo Consider getting info from relevant euca components:
# - iptables info
# - route info
# - instance xml
try:
# Show local ARP info...
arp_out = "\nLocal ARP cache for instance ip: " \
+ str(self.ip_address) + "\n"
arp_fd = os.popen('arp ' + str(self.ip_address))
for line in arp_fd:
arp_out += line
self.debug(arp_out)
except Exception as AE:
self.log.debug('Failed to get arp info:' + str(AE))
try:
self.tester.get_console_output(self)
except Exception as CE:
self.log.debug('Failed to get console output:' + str(CE))
def update_root_device_diskdrive(self):
if not self.root_device_type == 'ebs':
return
for disk in self.diskdrives:
if disk.index == 0:
if disk.ebs_volume:
for vol in self.attached_vols:
if vol.id == disk.ebs_volume:
if not disk.md5:
disk.update_md5_info_from_ebs()
return
volume = self.tester.get_volume(volume_id=disk.ebs_volume)
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume, self.tester)
volume.guestdev = disk.deviceid
volume.md5len = 1024
volume.md5 = self.get_dev_md5(disk.cygwin_scsi_drive, volume.md5len)
if not self.get_volume_from_attached_list_by_id(volume.id):
self.debug("{0} updating with root vol:{1}{2}"
.format(termline,
volume.id,
termline))
self.attached_vols.append(volume)
disk.update_md5_info_from_ebs()
return
def get_volume_from_attached_list_by_id(self, volume_id):
for vol in self.attached_vols:
if vol.id == volume_id:
return vol
def update_system_and_disk_info(self):
try:
self.update_system_info()
except Exception, sie:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating system info:" + str(sie))
try:
self.update_disk_info()
self.update_root_device_diskdrive()
self.print_partition_summary()
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
except Exception, ude:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating disk info:" + str(ude))
def has_sudo(self):
return False
def debug(self,msg,traceback=1,method=None,frame=False):
'''
Used to print debug output; defaults to print() but is overridden by self.debugmethod if not None
msg - mandatory -string, message to be printed
'''
if ( self.verbose is True ):
self.debugmethod(msg)
def sys(self, cmd, verbose=True, code=None, include_stderr=False, enable_debug=False, timeout=None):
'''
Issues a command against the ssh connection to this instance
Returns a list of the lines from stdout+stderr as a result of the command
cmd - mandatory - string, the command to be executed
verbose - optional - boolean flag to enable debug
timeout - optional - command timeout in seconds
'''
if (self.winrm is None):
raise Exception("WinInstance winrm connection is None")
return self.winrm.sys(command=cmd, include_stderr=include_stderr, timeout=timeout, verbose=verbose, code=code)
def test_rdp_port_status(self, ip=None, port=3389, timeout=10):
'''
Description: Attempts to test that the host is accepting tcp connections to the RDP port
'''
ip = ip or self.ip_address
return self.test_port_status(ip=ip, port=port, timeout=timeout)
def test_port_status(self, port, ip=None, timeout=5, tcp=True, verbose=True):
ip = ip or self.ip_address
return self.tester.test_port_status(ip, int(port), timeout=timeout, tcp=tcp, verbose=verbose)
def poll_for_port_status_with_boot_delay(self, interval=15, ports=[], socktimeout=5,timeout=180, waitforboot=300):
'''
Make sure some minimum time has passed since launch before polling ports on the guest side...
'''
launch_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if launch_seconds > waitforboot else (waitforboot - launch_seconds)
self.debug("Instance was launched "+str(launch_seconds)+" seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
time.sleep(sleeptime)
return self.poll_for_ports_status(ports,
ip=self.ip_address,
interval=interval,
socktimeout=socktimeout,
timeout=timeout)
def wait_for_time_since_launch(self,waitforboot=420):
'''
When using larger instance store images, this can allow for the delays caused by image size/transfer.
'''
boot_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if boot_seconds > waitforboot else (waitforboot - boot_seconds)
self.debug("Instance was launched "+str(boot_seconds)+"/"+str(waitforboot) + " seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
start = time.time()
elapsed = 0
print "Waiting for Windows to fully boot:",
while elapsed < sleeptime:
print "Waiting for Windows to fully boot:"+str(sleeptime-elapsed),
time.sleep(5)
elapsed=int(time.time()-start)
self.debug("test_wait_for_instance_boot: done waiting, instance up for "+str(waitforboot)+" seconds")
def poll_for_ports_status(self, ports=[], ip=None, interval=10, socktimeout=5, timeout=180):
ip = ip or self.ip_address
ports = ports or [self.rdp_port, self.winrm_port]
start = time.time()
elapsed = 0
attempt = 0
while elapsed < timeout:
attempt +=1
self.debug('test_poll_for_ports_status, ports: ' + ",".join(str(x) for x in ports) + ", attempt:" + str(attempt))
for port in ports:
if elapsed < timeout:
try:
self.debug('Trying ip:port:' + str(self.ip_address) + ':' + str(port) + ", elapsed:" + str(elapsed))
self.test_port_status(ip=ip, port=int(port), timeout=5)
return
except socket.error, se:
self.debug('test_ports_status failed socket error:'+str(se[0]))
#handle specific errors here, for now just for debug...
ecode=se[0]
if ecode == socket.errno.ETIMEDOUT or ecode == "timed out":
self.debug("test_poll_for_ports_status: Connect "+str(ip)+":" +str(port)+ " timed out retrying. Time remaining("+str(timeout-elapsed)+")")
except Exception, e:
tb = self.tester.get_traceback()
self.debug(tb)
self.debug('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+', err:'+str(e) )
elapsed = int(time.time() -start)
if elapsed < timeout:
time.sleep(interval)
raise Exception('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+' seconds')
def init_attached_volumes(self):
self.debug('init_attached_volumes... attached_vols: ' + str(self.attached_vols))
syncdict = self.sync_attached_volumes_with_clouds_view()
if syncdict['errors']:
errmsg = 'Errors syncing guest volumes with cloud at init:' + ",".join(str(e) for e in syncdict['errors'])
errmsg += 'Failed to sync guest volumes with cloud at init:' + ",".join(str(x) for x in syncdict['badvols'])
self.debug(errmsg)
time.sleep(60)
raise Exception(errmsg)
def sync_attached_volumes_with_clouds_view(self):
self.debug(termline +
"Starting sync_attached_volumes_with_clouds_view"
+ termline )
badvols = []
errors = []
ret = {'errors':errors, 'badvols':badvols}
#Get a list of volumes that the cloud believes are currently attached
cloud_volumes = self.tester.get_volumes(attached_instance=self.id)
#Make a copy of a list of volumes this instance thinks are currently attached
locallist = copy.copy(self.attached_vols)
self.debug('Cloud list:' + str(cloud_volumes))
self.debug('Local list:' + str(locallist))
for vol in cloud_volumes:
for local_vol in locallist:
if local_vol.id == vol.id:
locallist.remove(local_vol)
if not isinstance(vol, EuVolume):
vol = EuVolume.make_euvol_from_vol(vol, self.tester)
try:
self.update_volume_guest_info(volume=vol)
except Exception, e:
badvols.append(vol)
errors.append(vol.id + ' Error syncing with cloud:' + str (e) + '. \n')
for local_vol in locallist:
badvols.append(local_vol)
errors.append(local_vol.id + ' Error unattached volume found in guests attach list. \n')
self.debug(termline +
"Finishing sync_attached_volumes_with_clouds_view"
+ termline )
return ret
def update_system_info(self):
'''
Gather basic system info for this windows instance object and store in self.system_info
Example:
# print wins.system_info.OS_NAME
'Microsoft Windows 7 Professional'
'''
currentkey = None
swap = re.compile('([!@#$%^&*. ])')
info = self.sys('systeminfo')
if self.system_info:
system_info = self.system_info
else:
system_info = type('obj', (object,),{})
if info:
for line in info:
if re.match("^\w.+:", line):
linevals = line.split(':')
currentkey = linevals.pop(0)
#clean up the key string...
currentkey = re.sub('[()]', '', currentkey)
currentkey = re.sub(swap, '_', currentkey)
currentkey = currentkey.lower()
value = ":".join(str(x) for x in linevals) or ""
setattr(system_info, currentkey, str(value).strip())
elif currentkey:
#this is an additional value to our previous key
prev_value = getattr(system_info, currentkey)
if not isinstance(prev_value, types.ListType):
updated_value = [prev_value]
updated_value.append(str(line).strip())
setattr(system_info, currentkey, updated_value)
self.system_info = system_info
def get_cygwin_path(self, prefix="c:\\"):
if self.cygwin_path:
return self.cygwin_path
path = None
self.debug('Trying to find cygwin path...')
out = self.sys('dir ' + str(prefix) + ' /B')
for line in out:
if re.search('cygwin', line):
path = str(prefix) + str(line.strip()) + "\\"
self.cygwin_path = path
break
return path
def cygwin_curl(self, url, connect_timeout=30):
cygpath = self.get_cygwin_path()
if cygpath is None:
raise Exception('Could not find cygwin path on guest for curl?')
curl = cygpath + 'bin\curl.exe --connect-timeout ' + str(connect_timeout) + ' '
return self.sys(curl + str(url), code=0, timeout=connect_timeout)
def get_metadata(self, element_path='', prefix='latest/meta-data/', use_cygwin=True):
"""Return the lines of metadata from the element path provided"""
### If i can reach the metadata service ip use it to get metadata otherwise try the clc directly
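# Example (illustrative; the returned value is invented): fetching the instance id from the
# metadata service via cygwin curl.
#   lines = win_ins.get_metadata('instance-id')   # eg: ['i-ABCD1234']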
try:
if use_cygwin:
return self.cygwin_curl("http://169.254.169.254/"+str(prefix)+str(element_path), connect_timeout=10)
else:
return self.sys("curl --connect-timeout 10 http://169.254.169.254/"+str(prefix)+str(element_path), code=0)
except:
if use_cygwin:
return self.cygwin_curl("http://" + self.tester.get_ec2_ip() + ":8773/"+str(prefix) + str(element_path))
else:
return self.sys("curl http://" + self.tester.get_ec2_ip() + ":8773/"+str(prefix) + str(element_path), code=0)
def print_diskdrive_summary(self,printmethod=None):
printmethod = printmethod or self.debug
if not self.diskdrives:
printmethod('No disk drives to print?')
return
disklist = copy.copy(self.diskdrives)
buf = (disklist.pop()).get_summary()
for disk in disklist:
buf += disk.get_summary(printheader=False)
printmethod(buf)
def print_partition_summary(self,printmethod=None):
printmethod = printmethod or self.debug
if not self.disk_partitions:
printmethod('No disk partitions to print?')
return
partlist = copy.copy(self.disk_partitions)
buf = (partlist.pop()).get_summary()
for part in partlist:
buf += part.get_summary(printheader=False)
printmethod(buf)
def print_logicaldisk_summary(self,printmethod=None):
printmethod = printmethod or self.debug
if not self.logicaldisks:
printmethod('No logical disks to print?')
return
disklist = copy.copy(self.logicaldisks)
buf = (disklist.pop()).get_summary()
for disk in disklist:
buf += disk.get_summary(printheader=False)
printmethod(buf)
def update_disk_info(self , forceupdate=False):
if self.diskdrives:
if not forceupdate and (time.time() - self.diskdrives[0].last_updated) <= self.disk_update_interval:
return
self.debug('Fetching updated disk info...')
self.diskdrives = []
self.disk_partitions = []
self.logicaldisks = []
self.diskdrives = self.get_updated_diskdrive_info()
self.disk_partitions = self.get_updated_partition_info()
self.logicaldisks = self.get_updated_logicaldisk_info()
self.associate_diskdrives_to_partitions()
self.associate_partitions_to_logicaldrives()
def get_updated_diskdrive_info(self):
'''
Return a list of WinInstanceDiskDrive objects containing info parsed from wmic command output;
the caller (update_disk_info) stores them on self.diskdrives.
Since wmic doesn't seem to use delimiters, this method attempts to derive the length of each column/header
in order to parse out the info per disk.
'''
#cmd = "wmic diskdrive get /format:textvaluelist.xsl"
self.debug('Getting updated diskdrive info...')
cmd = "wmic diskdrive list full"
diskdrives = []
for disk_dict in self.get_parsed_wmic_command_output(cmd):
try:
diskdrives.append(WinInstanceDiskDrive(self,disk_dict))
except Exception, e:
tb = self.tester.get_traceback()
self.debug('Error attempting to create WinInstanceDiskDrive from following dict:')
self.print_dict(dict=disk_dict)
raise Exception(str(tb) + "\n Error attempting to create WinInstanceDiskDrive:" + str(e))
self.debug('get_updated_diskdrive_info, Done')
return diskdrives
def get_updated_partition_info(self):
'''
Return a list of WinInstanceDiskPartition objects containing info parsed from wmic command output;
the caller (update_disk_info) stores them on self.disk_partitions.
Since wmic doesn't seem to use delimiters, this method attempts to derive the length of each column/header
in order to parse out the info per partition.
'''
self.debug('Getting updated partition info...')
cmd = "wmic partition list brief /format:textvaluelist.xsl"
disk_partitions = []
for part_dict in self.get_parsed_wmic_command_output(cmd):
try:
disk_partitions.append(WinInstanceDiskPartition(self,part_dict))
except Exception, e:
tb = self.tester.get_traceback()
self.debug('Error attempting to create WinInstanceDiskPartition from following dict:')
self.print_dict(dict=part_dict)
raise Exception(str(tb) + "\n Error attempting to create WinInstanceDiskPartition:" + str(e))
self.debug('get_updated_partition_info, Done')
return disk_partitions
def get_updated_logicaldisk_info(self):
self.debug('Getting updated logicaldisk info...')
cmd ='wmic logicaldisk list /format:textvaluelist.xsl'
logicaldisks = []
for part_dict in self.get_parsed_wmic_command_output(cmd):
try:
logicaldisks.append(WinInstanceLogicalDisk(self,part_dict))
except Exception, e:
tb = self.tester.get_traceback()
self.debug('Error attempting to create WinInstanceLogicalDisk from following dict:')
self.print_dict(dict=part_dict)
raise Exception(str(tb) + "\n Error attempting to create WinInstanceLogicalDisk:" + str(e))
self.debug('get_updated_logicaldisk_info, Done')
return logicaldisks
def associate_diskdrives_to_partitions(self):
for disk in self.diskdrives:
disk.disk_partitions = []
for part in self.disk_partitions:
if part.diskindex == disk.index:
disk.disk_partitions.append(part)
def associate_partitions_to_logicaldrives(self, verbose=False):
for part in self.disk_partitions:
drive_id = None
part.logicaldisks = []
cmd = 'wmic partition where (DeviceID="Disk #' + str(part.diskindex) + \
', Partition #' + str(part.index) + '") assoc /assocclass:Win32_LogicalDiskToPartition'
output = self.sys(cmd, verbose=verbose, code=0)
for line in output:
if re.search('Win32_LogicalDisk.DeviceID',line):
try:
drive_id = str(line.split()[0].split('=')[1]).replace('"','').strip()
except Exception, e:
tb = self.tester.get_traceback()
self.debug(str(tb)+ "\nError getting logical drive info:" + str(e))
if drive_id:
for disk in self.logicaldisks:
if re.match(disk.deviceid, drive_id):
part.logicaldisks.append(disk)
disk.partition = part
break
def get_cygwin_scsi_dev_for_windows_drive(self, windisk=None, drive_id=""):
'''
param windisk: WinInstanceDiskType object. windisk.deviceid is used to look up the associated cygwin device
param drive_id: String representing the deviceid. Can be used instead of passing a WinInstanceDiskType
'''
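# Illustrative mapping (the device names are assumed, not guaranteed): a Windows physical
# drive id such as '\\.\PHYSICALDRIVE1' typically resolves to a cygwin block device such as
# '/dev/sdb' via self.cygwin_dev_map, e.g.:
#   cygdev = win_ins.get_cygwin_scsi_dev_for_windows_drive(drive_id='\\.\PHYSICALDRIVE1')
#   # cygdev -> '/dev/sdb' (or '' if no match was found)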
windisk_classname = ""
update = False
retries = 2
if windisk:
drive_id = windisk.deviceid
windisk_classname = str(windisk.__class__).split('.').pop()
#If this is a disk drive, allow a retry which sets the force-update flag; otherwise don't force an update or retry
if isinstance(windisk,WinInstanceDiskDrive):
update = True
if not drive_id:
raise Exception('WinInstanceDiskType or string w/ device id not provided')
self.debug('Attempting to get cygwin dev for windows drive:' + str(drive_id))
self.update_cygwin_windows_device_map()
for retry in xrange(0, retries):
for device in self.cygwin_dev_map:
if re.search("dev", device):
win_dev = str(self.cygwin_dev_map[device].split('\\').pop()).strip().upper()
formated_drive_id = str(drive_id.split('\\').pop()).strip().upper()
#self.debug('Attempt to match:"' + str(win_dev) + '" with "' + str(formated_drive_id) + '"')
if formated_drive_id == win_dev:
#self.debug('Found match')
return device
if update:
self.update_cygwin_windows_device_map(force_update=True)
else:
break
self.debug('WARNING: Could not find cygwin device for type:"' + str(windisk_classname) + '", deviceid:' + str(drive_id))
return ""
def get_parsed_wmic_command_output(self, wmic_command, verbose=False):
'''
Attempts to parse a wmic command using "/format:textvaluelist.xsl" for key value format into a list of
dicts.
:param wmic_command: string representing the remote wmic command to be run
:returns : list of dict(s) created from the parsed key value output of the command.
Note keys will be in lowercase
'''
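# Hedged sketch of the expected output shape (values invented for illustration): wmic emits
# KEY=VALUE lines with blank lines between objects, and each block becomes one dict, e.g.
#   DeviceID=\\.\PHYSICALDRIVE0
#   Index=0
#   Size=16106127360
#   <blank line>
# would yield: [{'deviceid': '\\.\PHYSICALDRIVE0', 'index': '0', 'size': '16106127360'}]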
self.debug('get_parsed_wmic_command_output, command:' + str(wmic_command))
ret_dicts = []
output = self.sys(wmic_command, verbose=verbose, code=0)
newdict = {}
for line in output:
if not re.match(r"^\w",line):
#If there is a blank line(s) then the previous object is complete
if newdict:
ret_dicts.append(newdict)
newdict = {}
else:
splitline = line.split('=')
key = str(splitline.pop(0)).lower()
if len(splitline) > 1:
value = "=".join(str(x) for x in splitline)
else:
if splitline:
value = splitline.pop()
else:
value = ''
newdict[key] = value
return ret_dicts
def get_logicaldisk_ids(self, forceupdate=False):
'''
:param forceupdate: boolean, to force an update of logical disks detected on the guest. Otherwise updates are
throttled to self.disk_update_interval
:returns list of device ids (ie: [A:, C:, D:])
'''
ret = []
self.update_disk_info(forceupdate=forceupdate)
for disk in self.logicaldisks:
ret.append(disk.deviceid)
return ret
def get_diskdrive_ids(self, drivelist=None, forceupdate=False):
'''
:param forceupdate: boolean, to force an update of logical disks detected on the guest. Otherwise updates are
throttled to self.disk_update_interval
:returns list of device ids ie: ['\\.\PHYSICALDRIVE0', '\\.\PHYSICALDRIVE1', '\\.\PHYSICALDRIVE2']
'''
ret = []
if not drivelist:
self.update_disk_info(forceupdate=forceupdate)
drivelist = self.diskdrives
for disk in drivelist:
ret.append(disk.deviceid)
return ret
def get_diskdrive_by_deviceid(self, deviceid):
for disk in self.diskdrives:
if disk.deviceid == deviceid:
return disk
def found(self, command, regex):
""" Returns a Boolean of whether the result of the command contains the regex"""
result = self.sys(command)
for line in result:
found = re.search(regex,line)
if found:
return True
return False
def assertFilePresent(self,filepath):
'''
Raise exception if file not found at filepath on remote guest. dirs '\' need to be represented as '\\'
'''
self.sys('dir ' + str(filepath), code=0)
def assertCygwinFilePresent(self, filepath):
self.cygwin_cmd('ls ' + str(filepath), code=0)
def attach_volume(self, volume, dev=None, timeout=180, overwrite=False):
'''
Method used to attach a volume to an instance and track its use by that instance
required - euvolume - the euvolume object being attached
required - tester - the eucaops/nephoria object/connection for this cloud
optional - dev - string to specify the dev path to 'request' when attaching the volume to
optional - timeout - integer- time allowed before failing
optional - overwrite - flag to indicate whether to overwrite head data of a non-zero filled volume upon attach for md5
'''
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume)
return self.attach_euvolume(volume, dev=dev, timeout=timeout, overwrite=overwrite)
def attach_euvolume(self, euvolume, dev=None, timeout=180, overwrite=False):
'''
Method used to attach a volume to an instance and track its use by that instance
required - euvolume - the euvolume object being attached
required - tester - the eucaops/nephoria object/connection for this cloud
optional - dev - string to specify the dev path to 'request' when attaching the volume to
optional - timeout - integer- time allowed before failing
optional - overwrite - flag to indicate whether to overwrite head data of a non-zero filled volume upon attach for md5
'''
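# Minimal usage sketch (assumes 'euvolume' is an available EuVolume in the same zone): attach
# the volume and let the method discover the guest device and record its md5 fingerprint.
#   guest_dev = win_ins.attach_euvolume(euvolume, timeout=180)
#   # guest_dev -> eg '\\.\PHYSICALDRIVE1'; euvolume.guestdev/md5/md5len are also populated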
if not isinstance(euvolume, EuVolume):
raise Exception("Volume needs to be of type euvolume, try attach_volume() instead?")
self.debug('Disk drive summary before attach attempt:')
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
self.debug("Attempting to attach volume:"+str(euvolume.id)+" to instance:" +str(self.id)+" to dev:"+ str(dev))
#grab a snapshot of our devices before attach for comparison purposes
diskdrive_list_before = self.get_diskdrive_ids()
use_serial = False
for disk in self.diskdrives:
if re.search('vol-', disk.serialnumber):
use_serial = True
break
attached_dev = None
start= time.time()
elapsed = 0
if dev is None:
#update our block device prefix
dev = self.get_free_scsi_dev()
if (self.tester.attach_volume(self, euvolume, dev, pause=10,timeout=timeout)):
if euvolume.attach_data.device != dev:
raise Exception('Attached device:' + str(euvolume.attach_data.device) +
", does not equal requested dev:" + str(dev))
#Find device this volume is using on guest...
euvolume.guestdev = None
while (not euvolume.guestdev and elapsed < timeout):
#Since all hypervisors may not support serial number info, check for an incremental diff in the
# list of physical diskdrives on this guest.
self.debug("Checking for volume attachment on guest, elapsed time("+str(elapsed)+")")
diskdrive_list_after = self.get_diskdrive_ids(forceupdate=True)
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
self.debug("dev_list_after:"+" ".join(diskdrive_list_after))
diff =list( set(diskdrive_list_after) - set(diskdrive_list_before) )
if len(diff) > 0:
self.debug('Got Diff in drives:' + str(diff))
for disk in self.diskdrives:
if re.search('vol-', disk.serialnumber):
use_serial = True
if euvolume.id == disk.ebs_volume:
attached_dev = disk.deviceid
euvolume.guestdev = attached_dev
self.debug("Volume:"+str(euvolume.id)+" guest device by serialnumber:"+str(euvolume.guestdev))
break
if not use_serial:
attached_dev = str(diff[0])
euvolume.guestdev = attached_dev.strip()
self.debug("Volume:"+str(euvolume.id)+"found guest device by diff:"+str(euvolume.guestdev))
if attached_dev:
euvolume.guestdev = attached_dev
attached_vol = self.get_volume_from_attached_list_by_id(euvolume.id)
self.attached_vols.append(euvolume)
self.debug(euvolume.id+": Requested dev:"+str(euvolume.attach_data.device)+", attached to guest device:"+str(euvolume.guestdev))
break
elapsed = int(time.time() - start)
time.sleep(2)
if not euvolume.guestdev or not attached_dev:
raise Exception('Device not found on guest after '+str(elapsed)+' seconds')
else:
self.debug('Failed to attach volume:'+str(euvolume.id)+' to instance:'+self.id)
raise Exception('Failed to attach volume:'+str(euvolume.id)+' to instance:'+self.id)
if (attached_dev is None):
self.debug("List after\n"+" ".join(diskdrive_list_after))
raise Exception('Volume:'+str(euvolume.id)+' attached, but not found on guest'+str(self.id)+' after '+str(elapsed)+' seconds?')
#Store the md5sum of this diskdrive in the euvolume...
disk = self.get_diskdrive_by_deviceid(attached_dev)
euvolume.md5len = 1024
euvolume.md5 = self.get_dev_md5(devpath=disk.cygwin_scsi_drive, length=euvolume.md5len)
#update the volume and instances information about the attachment...
self.update_volume_guest_info(volume=euvolume,md5=euvolume.md5, md5len=euvolume.md5len, guestdev=euvolume.guestdev)
self.debug('Success attaching volume:'+str(euvolume.id)+' to instance:'+self.id +
', cloud dev:'+str(euvolume.attach_data.device)+', attached dev:'+str(attached_dev) +
", elapsed:" + str(elapsed))
try:
self.rescan_disks(timeout=20)
except Exception, e:
self.debug('Warning. Error while trying to rescan disks after attaching volume. Error: ' + str(e))
euvolume.printself(printmethod=self.debug)
disk.print_self()
return attached_dev
def get_guest_dev_for_volume(self, volume, forceupdate=False):
use_serial = False
self.update_disk_info(forceupdate=forceupdate)
for disk in self.diskdrives:
if re.search('vol-', disk.serialnumber):
use_serial = True
break
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume=volume, tester=self.tester)
def get_disk_drive_by_id(self, deviceid):
self.update_system_info()
for disk in self.diskdrives:
if disk.deviceid == deviceid:
return disk
return None
def get_guestdevs_inuse_by_vols(self):
retlist = []
for vol in self.attached_vols:
retlist.append(vol.guestdev)
return retlist
def get_free_scsi_dev(self, prefix=None,maxdevs=16):
'''
The volume attach command requires a cloud level device name that is not currently associated with a volume
Note: This is the device name from the clouds perspective, not necessarily the guest's
This method attempts to find a free device name to use in the command
optional - prefix - string, prepended to the device search string
optional - maxdevs - number used to specify the max device names to iterate over. Some virt envs have a limit of 16 devs.
'''
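# Example of the names this search produces (assuming the default 'sd' prefix): it walks
# roughly /dev/sde, /dev/sdf, ... and returns the first name not in use by the cloud or by
# this object's attached_vols list, eg:
#   dev = win_ins.get_free_scsi_dev()   # -> '/dev/sde'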
d='e'
in_use_cloud = ""
in_use_guest = ""
dev = None
if prefix is None:
prefix = self.block_device_prefix
cloudlist=self.tester.get_volumes(attached_instance=self.id)
for x in xrange(0,maxdevs):
inuse=False
#double up the letter identifier to avoid exceeding z
if d == 'z':
prefix= prefix+'e'
dev = "/dev/"+prefix+str(d)
for avol in self.attached_vols:
if avol.attach_data.device == dev:
inuse = True
in_use_guest += str(avol.id)+", "
continue
#Check to see if the cloud has a conflict with this device name...
for vol in cloudlist:
vol.update()
if (vol.attach_data is not None) and (vol.attach_data.device == dev):
inuse = True
in_use_cloud += str(vol.id)+", "
continue
if inuse is False:
self.debug("Instance:"+str(self.id)+" returning available cloud scsi dev:"+str(dev))
return str(dev)
else:
d = chr(ord('e') + x) #increment the letter we append to the device string prefix
dev = None
if dev is None:
raise Exception("Could not find a free scsi dev on instance:"+self.id+", maxdevs:"+str(maxdevs)+"\nCloud_devs:"+str(in_use_cloud)+"\nGuest_devs:"+str(in_use_guest))
def detach_euvolume(self, euvolume, waitfordev=True, timeout=180):
'''
Method used to detach a volume from an instance and track its use by that instance
required - euvolume - the euvolume object being detached
waitfordev - boolean to indicate whether or not to poll the guest instance for the local device to be removed
optional - timeout - integer seconds to wait before timing out waiting for the volume to detach
'''
start = time.time()
elapsed = 0
found = True
for vol in self.attached_vols:
if vol.id == euvolume.id:
dev = vol.guestdev
if (self.tester.detach_volume(euvolume,timeout=timeout)):
if waitfordev:
self.debug("Cloud has detached" + str(vol.id) + ", Wait for device:"+str(dev)+" to be removed on guest...")
while (elapsed < timeout):
diskdrive_ids = []
try:
disk_drives = self.get_updated_diskdrive_info()
for disk in disk_drives:
if dev == disk.deviceid:
found = True
break
found = False
self.debug('Diskdrive associated with ' + str(vol.id) + ' has been removed from guest.')
#if device is not present remove it
self.attached_vols.remove(vol)
except Exception, de:
self.debug('Warning, error getting diskdrive id during detach:' + str(de))
if not found:
try:
self.rescan_disks(timeout=20)
except Exception, re:
self.debug('Warning: Error while trying to rescan disks after detaching volume:' + str(re))
try:
self.update_disk_info()
except Exception, ue:
self.debug('Warning: Error while trying to update disk info:' + str(ue))
try:
self.print_diskdrive_summary()
except: pass
self.debug('Volume:' + str(vol.id) + ', detached, and no longer found on guest at:' + str(dev))
vol.set_volume_detached_tags()
return True
time.sleep(10)
elapsed = int(time.time()-start)
diskdrive_ids = self.get_diskdrive_ids(drivelist=disk_drives)
self.debug('Current disk drives on guest:' + ",".join(str(x) for x in diskdrive_ids))
self.debug("Waiting for device '"+str(dev)+"' on guest to be removed.Elapsed:"+str(elapsed))
else:
self.attached_vols.remove(vol)
vol.set_volume_detached_tags()
return True
else:
raise Exception("Volume("+str(vol.id)+") failed to detach from device("+str(dev)+") on ("+str(self.id)+")")
raise Exception("Detach Volume("+str(euvolume.id)+") not found on ("+str(self.id)+")")
return False
def check_hostname(self):
if not hasattr(self, 'system_info'):
self.update_system_info()
if hasattr(self, 'system_info') and hasattr(self.system_info, 'host_name'):
if self.id.upper() == self.system_info.host_name.upper():
self.debug('Hostname:' + str(self.system_info.host_name) + ", instance.id:" + str(self.id))
else:
raise Exception('check_hostname failed: hostname:' + str(self.system_info.host_name).upper() +
" != id:" + str(self.id).upper())
else:
raise Exception('check_hostname failed: System_info.hostname not populated')
def get_process_list_brief(self):
'''
Returns a list of dicts representing the processes running on the remote guest. Each process is represented by a
dict containing information about that process.
'''
cmd = "wmic process list brief /format:textvaluelist.xsl"
return self.get_parsed_wmic_command_output(cmd)
def get_process_list_full(self):
'''
Returns a list of dicts representing the processes running on the remote guest. Each process is represented by a
dict containing information about that process.
'''
cmd = "wmic process list full"
return self.get_parsed_wmic_command_output(cmd)
def get_process_by_name(self,process_name):
'''
Attempts to look up a process on the remote guest.
param process_name: string. The name of the process to get info for
returns a dict representing the information returned from the remote guest
'''
cmd = 'wmic process ' + str(process_name) + ' get /format:textvaluelist.xsl'
result = self.get_parsed_wmic_command_output(cmd)
if result:
return result[0]
def get_services_list_brief(self):
'''
Returns a list of dicts representing the services from the remote guest. Each service is represented by a
dict containing information about the service.
'''
cmd = 'wmic service list brief /format:textvaluelist.xsl'
return self.get_parsed_wmic_command_output(cmd)
def get_services_list_full(self):
'''
Returns a list of dicts representing the services from the remote guest. Each service is represented by a
dict containing information about the service.
'''
cmd = 'wmic service list full'
return self.get_parsed_wmic_command_output(cmd)
def get_service_by_name(self,service_name):
'''
Attempts to lookup a service on the remote guest.
param service_name: string. The name of the service to get info
returns a dict representing the information returned from the remote guest
'''
cmd = 'wmic service ' + str(service_name) + ' get /format:textvaluelist.xsl'
result = self.get_parsed_wmic_command_output(cmd)
if result:
return result[0]
def get_memtotal_in_mb(self):
return long(self.system_info.total_physical_memory.split()[0].replace(',',''))
def get_memtotal_in_gb(self):
return long(self.get_memtotal_in_mb()/1024)
def check_ram_against_vmtype(self, pad=32):
total_ram = self.get_memtotal_in_mb()
self.debug('Ram check: vm_ram:' + str(self.vmtype_info.ram)
+ "mb vs memtotal:" + str(total_ram)
+ "mb. Diff:" + str(self.vmtype_info.ram - total_ram)
+ "mb, pad:" + str(pad) + "mb")
if not ((self.vmtype_info.ram - total_ram) <= pad):
raise Exception('Ram check failed. vm_ram:' + str(self.vmtype_info.ram)
+ " vs memtotal:" + str(total_ram) + ". Diff is greater than allowed pad:" + str(pad) + "mb")
else:
self.debug('check_ram_against_vmtype, passed')
def check_ephemeral_against_vmtype(self):
gb = self.gigabyte
size = self.vmtype_info.disk
ephemeral_dev = self.get_ephemeral_dev()
block_size = self.get_blockdev_size_in_bytes(ephemeral_dev)
gbs = block_size / gb
self.debug('Ephemeral check: ephem_dev:'
+ str(ephemeral_dev)
+ ", bytes:"
+ str(block_size)
+ ", gbs:"
+ str(gbs)
+ ", vmtype size:"
+ str(size))
if gbs != size:
raise Exception('Ephemeral check failed. ' + str(ephemeral_dev) + ' Blocksize: '
+ str(gbs) + "gb (" + str(block_size) + "bytes)"
+ ' != vmtype size:' +str(size) + "gb")
else:
self.debug('check_ephemeral_against_vmtype, passed')
return ephemeral_dev
def get_ephemeral_dev(self):
"""
Attempts to find the block device path on this instance
:return: string representing path to ephemeral block device
"""
ephem_name = None
dev_prefixs = ['s','v','xd','xvd']
if not self.root_device_type == 'ebs':
try:
self.assertFilePresent('/dev/' + str(self.rootfs_device))
return self.rootfs_device
except:
ephem_name = 'da'
else:
ephem_name = 'db'
devs = self.get_dev_dir()
for prefix in dev_prefixs:
if str(prefix+ephem_name) in devs:
return str('/dev/'+prefix+ephem_name)
raise Exception('Could not find ephemeral device?')
def cygwin_cmd(self, cmd, timeout=120, verbose=False, code=None):
cmd = self.get_cygwin_path() + '\\bin\\bash.exe --login -c "' + str(cmd) + '"'
return self.sys(cmd,timeout=timeout, verbose=verbose, code=code)
def get_dev_md5(self, devpath, length, timeout=60):
self.assertCygwinFilePresent(devpath)
if length == 0:
md5 = str(self.cygwin_cmd('md5sum ' + devpath, timeout=timeout)[0]).split(' ')[0].strip()
else:
md5 = str(self.cygwin_cmd("head -c " + str(length) + " " + str(devpath) + " | md5sum")[0]).split(' ')[0].strip()
return md5
def update_cygwin_windows_device_map(self, prefix='/dev/*', force_update=False):
cygwin_dev_map = {}
if not force_update:
if self.cygwin_dev_map:
if time.time() - self.cygwin_dev_map['last_updated'] <= 30:
cygwin_dev_map = self.cygwin_dev_map
if not cygwin_dev_map:
self.debug('Updating cygwin to windows device mapping...')
output = self.cygwin_cmd("for DEV in " + prefix + " ; do printf $DEV=$(cygpath -w $DEV); echo ''; done",
verbose=False, code=0)
for line in output:
if re.match(prefix, line):
split = line.split('=')
key = split.pop(0)
if split:
value = split.pop()
else:
value = ''
cygwin_dev_map[key]=value
cygwin_dev_map['last_updated'] = time.time()
self.cygwin_dev_map = cygwin_dev_map
self.debug('Updated cygwin to windows device mapping')
return cygwin_dev_map
def rescan_disks(self, timeout=20):
'''
Attempts to rescan disks on the guest. This may help expedite updates/discovery when attaching/detaching
volumes to the guest. This has also been found to hang post device removal so is used with a 20 second
command timeout as the default.
param timeout: integer. Seconds to wait on command before failing
'''
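# For reference, the generated diskpart script is just two commands:
#   rescan
#   list disk
# It is written to 'eutester_diskpart_script' on the guest and executed via 'diskpart /s'.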
scriptname = 'eutester_diskpart_script'
self.sys('(echo rescan && echo list disk ) > ' + str(scriptname), code=0)
self.sys('diskpart /s ' + str(scriptname), code=0, timeout=timeout)
def get_diskdrive_for_volume(self, volume):
if not self.is_volume_attached_to_this_instance(volume):
return None
ret_disk = None
for disk in self.diskdrives:
disk.update_ebs_info()
if disk.ebs_volume == volume.id:
ret_disk = disk
if not ret_disk:
ret_disk = self.find_diskdrive_for_volume_by_serial_number(volume, force_check=True)
if not ret_disk:
if hasattr(volume,'md5') and volume.md5:
ret_disk = self.find_diskdrive_for_volume_by_md5(volume, force_check=True)
return ret_disk
def find_diskdrive_for_volume_by_md5(self, volume, md5=None, length=None, force_check=False):
if not force_check and not self.is_volume_attached_to_this_instance(volume):
return None
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume=volume,tester=self.tester)
md5 = md5 or volume.md5
if not md5:
return None
length = length or volume.md5len
for disk in self.diskdrives:
if disk.cygwin_scsi_drive:
disk_md5 = self.get_dev_md5(disk.cygwin_scsi_drive, length=length)
if disk_md5 == md5:
volume.guestdev = disk.deviceid
volume.md5 = disk_md5
volume.md5len = length
disk.ebs_volume = volume.id
return disk
return None
def find_diskdrive_for_volume_by_serial_number(self, volume, serial_number=None, force_check=False):
'''
Attempt to iterate through all the diskdrives we're aware of. If a diskdrive is found with a serial_number
associated with the volume, return that diskdrive obj..
example serial number format: vol-81C13EA4-dev-sdg
:param volume: volume obj to use for deriving the serial_number
:param serial_number: string. Optional. The string representing the serial # to match.
:returns WinInstanceDiskDrive if found, else None
'''
if not force_check and not self.is_volume_attached_to_this_instance(volume):
return None
if not serial_number:
serial_number = volume.id + volume.attach_data.device.replace('/','-')
for disk in self.diskdrives:
if disk.serialnumber == serial_number:
return disk
return None
def is_volume_attached_to_this_instance(self, volume):
'''
Attempts to look up volume state per cloud to confirm the cloud believes this volume is attached
to this instance. This does not verify the guest/hypervisor also believes the volume is attached.
:param volume: volume obj.
:returns boolean
'''
volume.update()
if hasattr(volume, 'attach_data') and volume.attach_data and (volume.attach_data.instance_id == self.id):
self.debug('Volume:' + str(volume.id) + " is attached to this instance: " + str(self.id) + " per cloud perspective")
return True
else:
self.debug('Volume:' + str(volume.id) + " is NOT attached to this instance: " + str(self.id) + " per cloud perspective")
return False
def update_volume_guest_info(self, volume, md5=None, md5len=None, guestdev=None):
self.debug("{0} update_volume_guest_info: {1} {2}"
.format(termline, volume, termline))
if not self.is_volume_attached_to_this_instance(volume):
raise Exception('Volume not attached to this instance')
disk = None
if not self.get_volume_from_attached_list_by_id(volume.id):
self.attached_vols.append(volume)
volume.guestdev = guestdev or volume.guestdev
if md5:
if not md5len:
raise Exception('Must provide md5len if providing the md5')
volume.md5 = md5
volume.md5len = md5len
else:
disk = self.get_diskdrive_for_volume(volume)
if not disk:
raise Exception('Could not find diskdrive for volume when attempting to update volume guest info:' + str(volume))
volume.md5len = md5len or 1024
volume.md5 = self.get_dev_md5(disk.cygwin_scsi_drive, volume.md5len)
if not guestdev:
volume.guestdev = disk.deviceid
disk = disk or self.get_diskdrive_for_volume(volume)
disk.update_ebs_info()
volume.update_volume_attach_info_tags(md5=volume.md5, md5len=volume.md5len, instance_id=self.id, guestdev=volume.guestdev)
return volume
def get_unsynced_volumes(self, check_md5=True):
        '''
        Description: Returns a list of volumes which are:
        -in a state the cloud believes the vol is no longer attached
        -the attached device has changed, or is not found.
        If all euvolumes are shown as attached to this instance, and the last known local dev is present and/or a
        local device is found with a matching md5 checksum, then an empty list is returned as all volumes are
        successfully attached and state is in sync.
        By default this method will iterate through all the known euvolumes attached to this euinstance.
        Returns a list of euvolumes for which a corresponding guest device could not be found, or which the cloud
        no longer believes are attached.
        :param check_md5: - optional - find devices by md5 comparison. Default is to only perform this check
                            when virtio_blk is in use.
        '''
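        # Usage sketch (hypothetical names; 'wins' is a connected WinInstance):
        #   badvols = wins.get_unsynced_volumes()
        #   if badvols:
        #       raise Exception('Volumes out of sync: ' + ",".join(str(v.id) for v in badvols))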
bad_list = []
retdict = self.sync_attached_volumes_with_clouds_view()
bad_list.extend(retdict['badvols'])
return bad_list
def reboot_instance_and_verify(self,
waitconnect=60,
timeout=600,
wait_for_ports=180,
connect=True,
checkvolstatus=False,
pad=5,
uptime_retries=3):
        '''
        Attempts to reboot an instance and verify its state post reboot.
        waitconnect -optional-integer representing seconds to wait before attempting to connect to instance after reboot
        timeout -optional-integer, seconds. If a connection has failed, this timer is used to determine a retry
        wait_for_ports -optional-integer, seconds to wait for the rdp and winrm ports to report open post reboot
        connect -optional-boolean to indicate whether an ssh session should be established once the expected state has been reached
        checkvolstatus -optional-boolean to be used to check volume status post start up
        pad -optional-integer, seconds of slack allowed when comparing pre and post reboot uptimes
        '''
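        # Usage sketch (hypothetical names; reboots the guest and confirms uptime reset plus volume sync):
        #   wins.reboot_instance_and_verify(waitconnect=60, checkvolstatus=True)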
msg=""
newuptime = None
attempt = 0
def get_safe_uptime():
uptime = None
try:
uptime = self.get_uptime()
except: pass
return uptime
self.debug('Attempting to reboot instance:'+str(self.id)+', check attached volume state first')
uptime = self.tester.wait_for_result( get_safe_uptime, None, oper=operator.ne)
elapsed = 0
start = time.time()
if checkvolstatus:
#update the md5sums per volume before reboot
bad_vols=self.get_unsynced_volumes()
if bad_vols != []:
for bv in bad_vols:
                    self.debug(str(self.id) + ' Unsynced volume found:' + str(bv.id))
raise Exception(str(self.id)+"Could not reboot using checkvolstatus flag due to unsync'd volumes")
self.debug('Rebooting now...')
self.reboot()
time.sleep(waitconnect)
try:
            self.poll_for_ports_status(ports=[self.rdp_port, self.winrm_port], timeout=wait_for_ports)
        except:
            self.debug('Failed to poll winrm and rdp ports after ' + str(wait_for_ports) + ' seconds, trying to connect anyway...')
timeout=timeout - int(time.time()-start)
while (elapsed < timeout):
self.connect_to_instance(timeout=timeout)
#Wait for the system to provide a valid response for uptime, early connections may not
newuptime = self.tester.wait_for_result( get_safe_uptime, None, oper=operator.ne)
elapsed = int(time.time()-start)
            #Check that the new uptime is at least 'pad' seconds less than the pre-reboot uptime plus elapsed
            #time, otherwise the guest does not appear to have rebooted
            if ((uptime + elapsed) - newuptime) < pad:
err_msg = "Instance uptime does not represent a reboot. Orig:"+str(uptime)+\
", New:"+str(newuptime)+", elapsed:"+str(elapsed)+"/"+str(timeout)
if elapsed > timeout:
raise Exception(err_msg)
else:
self.debug(err_msg)
else:
self.debug("Instance uptime indicates a reboot. Orig:"+str(uptime)+\
", New:"+str(newuptime)+", elapsed:"+str(elapsed))
break
if checkvolstatus:
badvols= self.get_unsynced_volumes()
if badvols != []:
for vol in badvols:
msg = msg+"\nVolume:"+vol.id+" Local Dev:"+vol.guestdev
raise Exception("Missing volumes post reboot:"+str(msg)+"\n")
self.debug(self.id+" reboot_instance_and_verify Success")
def get_uptime(self):
if not hasattr(self, 'system_info'):
self.update_system_info()
if hasattr(self.system_info, 'system_boot_time'):
return self._get_uptime_from_system_boot_time()
elif hasattr(self.system_info, 'system_up_time'):
return self._get_uptime_from_system_up_time()
else:
tb = self.tester.get_traceback()
raise Exception(str(tb) + '\nCould not get system boot or up time from system_info')
def _get_uptime_from_system_boot_time(self):
#11/18/2013, 3:15:39 PM
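        # Worked example: "11/18/2013, 3:15:39 PM" splits into datestring="11/18/2013,", timestring="3:15:39",
        # ampm="PM"; the parsed boot time becomes 2013-11-18 15:15:39 and the uptime returned is the current
        # time minus that timestamp, in whole seconds.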
if not hasattr(self, 'system_info'):
self.update_system_info()
splitdate = self.system_info.system_boot_time.split()
datestring = splitdate[0]
timestring = splitdate[1]
ampm = splitdate[2]
month, day, year = datestring.replace(',',"").split('/')
        hours, minutes, seconds = timestring.split(':')
        hours = int(hours)
        if ampm == 'PM' and hours != 12:
            hours += 12
        elif ampm == 'AM' and hours == 12:
            hours = 0
datetimestring = str(year) + " " + \
str(month) + " " + \
str(day) + " " + \
str(hours) + " " + \
str(minutes) + " " + \
str(seconds)
dt = datetime.strptime(datetimestring, "%Y %m %d %H %M %S")
return int(time.time() - time.mktime(dt.timetuple()))
def _get_uptime_from_system_up_time(self):
#0 Days, 0 Hours, 6 Minutes, 39 Seconds
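        # Worked example: the string above parses to days=0, hours=0, minutes=6, seconds=39,
        # giving uptime = (0 * 86400) + (0 * 3600) + (6 * 60) + 39 = 399 seconds.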
if not hasattr(self, 'system_info'):
self.update_system_info()
uptime_string = self.system_info.system_up_time
days = 0
hours = 0
minutes = 0
seconds = 0
split = uptime_string.split(',')
for part in split:
time_string = ""
if re.search('Days', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
days = int(time_string or 0)
elif re.search('Hours', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
hours = int(time_string or 0)
elif re.search('Minutes', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
minutes = int(time_string or 0)
elif re.search('Seconds', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
seconds = int(time_string or 0)
self.debug("Days:" +str(days)+', Hours:'+ str(hours) + ", Minutes:" + str(minutes) + ", Seconds:" + str(seconds))
uptime = (days * 86400) + (hours * 3600) + (minutes * 60) + seconds
return uptime
def stop_instance_and_verify(self, timeout=200, state='stopped',
failstate='terminated', check_vols=True):
'''
Attempts to stop instance and verify the state has gone to
stopped state
        :param timeout: -optional-time to wait on instance to go to state 'state' before failing
:param state: -optional-the expected state to signify success, default is stopped
:param failstate: -optional-a state transition that indicates failure, default is terminated
'''
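        # Usage sketch (hypothetical names; stops the instance and confirms attached volumes stay 'in-use'):
        #   wins.stop_instance_and_verify(timeout=300)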
self.debug(self.id+" Attempting to stop instance...")
start = time.time()
elapsed = 0
self.stop()
while (elapsed < timeout):
time.sleep(2)
self.update()
if self.state == state:
break
if self.state == failstate:
raise Exception(str(self.id) + " instance went to state:" +
str(self.state) + " while stopping")
elapsed = int(time.time()- start)
if elapsed % 10 == 0 :
self.debug(str(self.id) + " wait for stop, in state:" +
str(self.state) + ",time remaining:" +
str(elapsed) + "/" + str(timeout) )
if self.state != state:
raise Exception(self.id + " state: " + str(self.state) +
" expected:" + str(state) +
", after elapsed:" + str(elapsed))
if check_vols:
for volume in self.attached_vols:
                volume.update()
if volume.status != 'in-use':
raise Exception(str(self.id) + ', Volume ' +
str(volume.id) + ':' + str(volume.status)
+ ' state did not remain in-use '
'during stop')
self.debug(self.id + " stop_instance_and_verify Success")
def start_instance_and_verify(self, timeout=300, state = 'running',
failstates=['terminated'], failfasttime=30,
connect=True, checkvolstatus=True):
'''
Attempts to start instance and verify state, and reconnects ssh session
:param timeout: -optional-time to wait on instance to go to state
'state' before failing
:param state: -optional-the expected state to signify success,
default is running
        :param failstates: -optional-a list of state transitions that indicate failure,
                      default is ['terminated']
        :param failfasttime: -optional-seconds to allow before failstates are checked
                      during the start polling loop
:param connect: -optional - boolean to indicate whether an ssh
session should be established once the expected state
has been reached
:param checkvolstatus: -optional -boolean to be used to check volume
status post start up
'''
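        # Usage sketch (hypothetical names; starts a stopped instance, reconnects winrm and re-checks volumes):
        #   wins.start_instance_and_verify(timeout=300, connect=True, checkvolstatus=True)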
self.debug(self.id+" Attempting to start instance...")
        if checkvolstatus:
            for volume in self.attached_vols:
                volume.update()
                if volume.status != 'in-use':
                    raise Exception(str(self.id) + ', Volume ' + str(volume.id) + ':' + str(volume.status)
                                    + ' state did not remain in-use during stop' )
self.debug("\n"+ str(self.id) + ": Printing Instance 'attached_vol' list:\n")
self.tester.show_volumes(self.attached_vols)
msg=""
start = time.time()
elapsed = 0
self.update()
        #Add fail fast states without mutating the default failstates list...
        if self.state == 'stopped':
            failstates = list(failstates) + ['stopped', 'stopping']
self.start()
while (elapsed < timeout):
elapsed = int(time.time()- start)
self.update()
self.debug(str(self.id) + " wait for start, in state:" +
str(self.state) + ",time remaining:" + str(elapsed) +
"/"+str(timeout) )
if self.state == state:
break
if elapsed >= failfasttime:
for failstate in failstates:
if self.state == failstate:
raise Exception(str(self.id) +
" instance went to state:" +
str(self.state) + " while starting")
time.sleep(10)
if self.state != state:
raise Exception(self.id + " not in " + str(state) +
" state after elapsed:" + str(elapsed))
else:
self.debug(self.id + " went to state:" + str(state))
if connect:
self.connect_to_instance(timeout=timeout)
if checkvolstatus:
badvols= self.get_unsynced_volumes(check_md5=True)
if badvols != []:
for vol in badvols:
msg = msg + "\nVolume:" + vol.id + " Local Dev:" +\
vol.guestdev
raise Exception("Missing volumes post reboot:" + str(msg) +
"\n")
self.debug(self.id+" start_instance_and_verify Success")
remove it\n self.attached_vols.remove(vol)\n\n except Exception, de:\n self.debug('Warning, error getting diskdrive id during detach:' + str(de))\n if not found:\n try:\n self.rescan_disks(timeout=20)\n except Exception, re:\n self.debug('Warning: Error while trying to rescan disks after detaching volume:' + str(re))\n try:\n self.update_disk_info()\n except Exception, ue:\n self.debug('Warning: Error while trying to update disk info:' + str(ue))\n try:\n self.print_diskdrive_summary()\n except: pass\n self.debug('Volume:' + str(vol.id) + ', detached, and no longer found on guest at:' + str(dev))\n vol.set_volume_detached_tags()\n return True\n time.sleep(10)\n elapsed = int(time.time()-start)\n diskdrive_ids = self.get_diskdrive_ids(drivelist=disk_drives)\n self.debug('Current disk drives on guest:' + \",\".join(str(x) for x in diskdrive_ids))\n self.debug(\"Waiting for device '\"+str(dev)+\"' on guest to be removed.Elapsed:\"+str(elapsed))\n\n else:\n self.attached_vols.remove(vol)\n vol.set_volume_detached_tags()\n return True\n else:\n raise Exception(\"Volume(\"+str(vol.id)+\") failed to detach from device(\"+str(dev)+\") on (\"+str(self.id)+\")\")\n\n raise Exception(\"Detach Volume(\"+str(euvolume.id)+\") not found on (\"+str(self.id)+\")\")\n return False\n\n def check_hostname(self):\n if not hasattr(self, 'system_info'):\n self.update_system_info()\n if hasattr(self, 'system_info') and hasattr(self.system_info, 'host_name'):\n if self.id.upper() == self.system_info.host_name.upper():\n self.debug('Hostname:' + str(self.id) + \", instance.id:\" + str(self.system_info.host_name))\n else:\n raise Exception('check_hostname failed: hostname:' + str(self.system_info.host_name).upper() +\n \" != id:\" + str(self.id).upper())\n else:\n raise Exception('check_hostname failed: System_info.hostname not populated')\n\n def get_process_list_brief(self):\n '''\n Returns a list of dicts representing the processes running on the remote guest. Each service is represented by a\n dict containing information about the service.\n '''\n cmd = \"wmic process list brief /format:textvaluelist.xsl\"\n return self.get_parsed_wmic_command_output(cmd)\n\n def get_process_list_full(self):\n '''\n Returns a list of dicts representing the processes running on the remote guest. Each service is represented by a\n dict containing information about the service.\n '''\n cmd = \"wmic process list full\"\n return self.get_parsed_wmic_command_output(cmd)\n\n def get_process_by_name(self,process_name):\n '''\n Attempts to lookup a service on the remote guest.\n param service_name: string. The name of the service to get info\n returns a dict representing the information returned from the remote guest\n '''\n cmd = 'wmic process ' + str(process_name) + ' get /format:textvaluelist.xsl'\n result = self.get_parsed_wmic_command_output(cmd)\n if result:\n return result[0]\n\n def get_services_list_brief(self):\n '''\n Returns a list of dicts representing the services from the remote guest. Each service is represented by a\n dict containing information about the service.\n '''\n cmd = 'wmic service list brief /format:textvaluelist.xsl'\n return self.get_parsed_wmic_command_output(cmd)\n\n def get_services_list_full(self):\n '''\n Returns a list of dicts representing the services from the remote guest. 
Each service is represented by a\n dict containing information about the service.\n '''\n cmd = 'wmic service list full'\n return self.get_parsed_wmic_command_output(cmd)\n\n def get_service_by_name(self,service_name):\n '''\n Attempts to lookup a service on the remote guest.\n param service_name: string. The name of the service to get info\n returns a dict representing the information returned from the remote guest\n '''\n cmd = 'wmic service ' + str(service_name) + ' get /format:textvaluelist.xsl'\n result = self.get_parsed_wmic_command_output(cmd)\n if result:\n return result[0]\n\n def get_memtotal_in_mb(self):\n return long(self.system_info.total_physical_memory.split()[0].replace(',',''))\n\n def get_memtotal_in_gb(self):\n return long(self.get_memtotal_in_mb()/1024)\n\n def check_ram_against_vmtype(self, pad=32):\n total_ram = self.get_memtotal_in_mb()\n self.debug('Ram check: vm_ram:' + str(self.vmtype_info.ram)\n + \"mb vs memtotal:\" + str(total_ram)\n + \"mb. Diff:\" + str(self.vmtype_info.ram - total_ram)\n + \"mb, pad:\" + str(pad) + \"mb\")\n if not ((self.vmtype_info.ram - total_ram) <= pad):\n raise Exception('Ram check failed. vm_ram:' + str(self.vmtype_info.ram)\n + \" vs memtotal:\" + str(total_ram) + \". Diff is greater than allowed pad:\" + str(pad) + \"mb\")\n else:\n self.debug('check_ram_against_vmtype, passed')\n\n def check_ephemeral_against_vmtype(self):\n gb = self.gigabyte\n size = self.vmtype_info.disk\n ephemeral_dev = self.get_ephemeral_dev()\n block_size = self.get_blockdev_size_in_bytes(ephemeral_dev)\n gbs = block_size / gb\n self.debug('Ephemeral check: ephem_dev:'\n + str(ephemeral_dev)\n + \", bytes:\"\n + str(block_size)\n + \", gbs:\"\n + str(gbs)\n + \", vmtype size:\"\n + str(size))\n if gbs != size:\n raise Exception('Ephemeral check failed. 
' + str(ephemeral_dev) + ' Blocksize: '\n + str(gbs) + \"gb (\" + str(block_size) + \"bytes)\"\n + ' != vmtype size:' +str(size) + \"gb\")\n else:\n self.debug('check_ephemeral_against_vmtype, passed')\n return ephemeral_dev\n\n def get_ephemeral_dev(self):\n \"\"\"\n Attempts to find the block device path on this instance\n\n :return: string representing path to ephemeral block device\n \"\"\"\n ephem_name = None\n dev_prefixs = ['s','v','xd','xvd']\n if not self.root_device_type == 'ebs':\n try:\n self.assertFilePresent('/dev/' + str(self.rootfs_device))\n return self.rootfs_device\n except:\n ephem_name = 'da'\n else:\n ephem_name = 'db'\n devs = self.get_dev_dir()\n for prefix in dev_prefixs:\n if str(prefix+ephem_name) in devs:\n return str('/dev/'+prefix+ephem_name)\n raise Exception('Could not find ephemeral device?')\n\n\n def cygwin_cmd(self, cmd, timeout=120, verbose=False, code=None):\n cmd = self.get_cygwin_path() + '\\\\bin\\\\bash.exe --login -c \"' + str(cmd) + '\"'\n return self.sys(cmd,timeout=timeout, verbose=verbose, code=code)\n\n def get_dev_md5(self, devpath, length, timeout=60):\n self.assertCygwinFilePresent(devpath)\n if length == 0:\n md5 = str(self.cygwin_cmd('md5sum ' + devpath, timeout=timeout)[0]).split(' ')[0].strip()\n else:\n md5 = str(self.cygwin_cmd(\"head -c \" + str(length) + \" \" + str(devpath) + \" | md5sum\")[0]).split(' ')[0].strip()\n return md5\n\n\n def update_cygwin_windows_device_map(self, prefix='/dev/*', force_update=False):\n cygwin_dev_map = {}\n if not force_update:\n if self.cygwin_dev_map:\n if time.time() - self.cygwin_dev_map['last_updated'] <= 30:\n cygwin_dev_map = self.cygwin_dev_map\n if not cygwin_dev_map:\n self.debug('Updating cygwin to windows device mapping...')\n output = self.cygwin_cmd(\"for DEV in \" + prefix + \" ; do printf $DEV=$(cygpath -w $DEV); echo ''; done\",\n verbose=False, code=0)\n for line in output:\n if re.match(prefix, line):\n split = line.split('=')\n key = split.pop(0)\n if split:\n value = split.pop()\n else:\n value = ''\n cygwin_dev_map[key]=value\n cygwin_dev_map['last_updated'] = time.time()\n self.cygwin_dev_map = cygwin_dev_map\n self.debug('Updated cygwin to windows device mapping')\n return cygwin_dev_map\n\n\n def rescan_disks(self, timeout=20):\n '''\n Attempts to rescan disks on the guest. This may help expedite updates/discovery when attaching/detaching\n volumes to the guest. This has also been found to hang post device removal so is used with a 20 second\n command timeout as the default.\n param timeout: integer. 
Seconds to wait on command before failing\n '''\n scriptname = 'eutester_diskpart_script'\n self.sys('(echo rescan && echo list disk ) > ' + str(scriptname), code=0)\n self.sys('diskpart /s ' + str(scriptname), code=0, timeout=timeout)\n\n\n def get_diskdrive_for_volume(self, volume):\n if not self.is_volume_attached_to_this_instance(volume):\n return None\n ret_disk = None\n for disk in self.diskdrives:\n disk.update_ebs_info()\n if disk.ebs_volume == volume.id:\n ret_disk = disk\n if not ret_disk:\n ret_disk = self.find_diskdrive_for_volume_by_serial_number(volume, force_check=True)\n if not ret_disk:\n if hasattr(volume,'md5') and volume.md5:\n ret_disk = self.find_diskdrive_for_volume_by_md5(volume, force_check=True)\n return ret_disk\n\n\n\n def find_diskdrive_for_volume_by_md5(self, volume, md5=None, length=None, force_check=False):\n if not force_check and not self.is_volume_attached_to_this_instance(volume):\n return None\n if not isinstance(volume, EuVolume):\n volume = EuVolume.make_euvol_from_vol(volume=volume,tester=self.tester)\n md5 = md5 or volume.md5\n if not md5:\n return None\n length = length or volume.md5len\n for disk in self.diskdrives:\n if disk.cygwin_scsi_drive:\n disk_md5 = self.get_dev_md5(disk.cygwin_scsi_drive, length=length)\n if disk_md5 == md5:\n volume.guestdev = disk.deviceid\n volume.md5 = disk_md5\n volume.md5len = length\n disk.ebs_volume = volume.id\n return disk\n return None\n\n\n\n def find_diskdrive_for_volume_by_serial_number(self, volume, serial_number=None, force_check=False):\n '''\n Attempt to iterate through all the diskdrives were aware of. If a diskdrive is found with a serial_number\n associated with the volume, return that diskdrive obj..\n example serial number format: vol-81C13EA4-dev-sdg\n\n :param volume: volume obj to use for deriving the serial_number\n :param serial_number: string. Optional. The string representing the serial # to match.\n :returns WinInstanceDiskDrive if found, else None\n '''\n if not force_check and not self.is_volume_attached_to_this_instance(volume):\n return None\n if not serial_number:\n serial_number = volume.id + volume.attach_data.device.replace('/','-')\n for disk in self.diskdrives:\n if disk.serialnumber == serial_number:\n return disk\n return None\n\n\n\n def is_volume_attached_to_this_instance(self, volume):\n '''\n Attempts to look up volume state per cloud to confirm the cloud believe the state of this volume is attached\n to this instance. 
This does not verify the guest/hypervisor also belives the volume is attached.\n :param volume: volume obj.\n :returns boolean\n '''\n volume.update()\n if hasattr(volume, 'attach_data') and volume.attach_data and (volume.attach_data.instance_id == self.id):\n self.debug('Volume:' + str(volume.id) + \" is attached to this instance: \" + str(self.id) + \" per cloud perspective\")\n return True\n else:\n self.debug('Volume:' + str(volume.id) + \" is NOT attached to this instance: \" + str(self.id) + \" per cloud perspective\")\n return False\n\n\n\n def update_volume_guest_info(self, volume, md5=None, md5len=None, guestdev=None):\n self.debug(\"{0} update_volume_guest_info: {1} {2}\"\n .format(termline, volume, termline))\n if not self.is_volume_attached_to_this_instance(volume):\n raise Exception('Volume not attached to this instance')\n disk = None\n if not self.get_volume_from_attached_list_by_id(volume.id):\n self.attached_vols.append(volume)\n volume.guestdev = guestdev or volume.guestdev\n if md5:\n if not md5len:\n raise Exception('Must provide md5len if providing the md5')\n volume.md5 = md5\n volume.md5len = md5len\n else:\n disk = self.get_diskdrive_for_volume(volume)\n if not disk:\n raise Exception('Could not find diskdrive for volume when attempting to update volume guest info:' + str(volume))\n volume.md5len = md5len or 1024\n volume.md5 = self.get_dev_md5(disk.cygwin_scsi_drive, volume.md5len)\n if not guestdev:\n volume.guestdev = disk.deviceid\n disk = disk or self.get_diskdrive_for_volume(volume)\n disk.update_ebs_info()\n volume.update_volume_attach_info_tags(md5=volume.md5, md5len=volume.md5len, instance_id=self.id, guestdev=volume.guestdev)\n return volume\n\n def get_unsynced_volumes(self, check_md5=True):\n '''\n Description: Returns list of volumes which are:\n -in a state the cloud believes the vol is no longer attached\n -the attached device has changed, or is not found.\n If all euvols are shown as attached to this instance, and the last known local dev is present and/or a local device is found with matching md5 checksum\n then the list will return 'None' as all volumes are successfully attached and state is in sync.\n By default this method will iterate through all the known euvolumes attached to this euinstance.\n A subset can be provided in the list argument 'euvol_list'.\n Returns a list of euvolumes for which a corresponding guest device could not be found, or the cloud no longer believes is attached.\n\n :param euvol_list: - optional - euvolume object list. Defaults to all self.attached_vols\n :param md5length: - optional - defaults to the length given in each euvolume. Used to calc md5 checksum of devices\n :param timerpervolume: -optional - time to wait for device to appear, per volume before failing\n :param min_polls: - optional - minimum iterations to check guest devs before failing, despite timeout\n :param check_md5: - optional - find devices by md5 comparision. Default is to only perform this check when virtio_blk is in use.\n '''\n bad_list = []\n retdict = self.sync_attached_volumes_with_clouds_view()\n bad_list.extend(retdict['badvols'])\n return bad_list\n\n\n\n def reboot_instance_and_verify(self,\n waitconnect=60,\n timeout=600,\n wait_for_ports=180,\n connect=True,\n checkvolstatus=False,\n pad=5,\n uptime_retries=3):\n '''\n Attempts to reboot an instance and verify it's state post reboot.\n waitconnect-optional-integer representing seconds to wait before attempting to connect to instance after reboot\n timeout-optional-integer, seconds. 
If a connection has failed, this timer is used to determine a retry\n connect- optional - boolean to indicate whether an ssh session should be established once the expected state has been reached\n checkvolstatus - optional -boolean to be used to check volume status post start up\n '''\n msg=\"\"\n newuptime = None\n attempt = 0\n def get_safe_uptime():\n uptime = None\n try:\n uptime = self.get_uptime()\n except: pass\n return uptime\n self.debug('Attempting to reboot instance:'+str(self.id)+', check attached volume state first')\n uptime = self.tester.wait_for_result( get_safe_uptime, None, oper=operator.ne)\n elapsed = 0\n start = time.time()\n if checkvolstatus:\n #update the md5sums per volume before reboot\n bad_vols=self.get_unsynced_volumes()\n if bad_vols != []:\n for bv in bad_vols:\n self.debug(str(self.id)+'Unsynced volume found:'+str(bv.id))\n raise Exception(str(self.id)+\"Could not reboot using checkvolstatus flag due to unsync'd volumes\")\n self.debug('Rebooting now...')\n self.reboot()\n time.sleep(waitconnect)\n try:\n self.poll_for_ports_status(ports=[3389,5589], timeout=wait_for_ports)\n except:\n self.debug('Failed to poll winrm and rdp ports after ' + str(wait_for_ports) + ' seconds, try to connect anyways...')\n timeout=timeout - int(time.time()-start)\n while (elapsed < timeout):\n self.connect_to_instance(timeout=timeout)\n #Wait for the system to provide a valid response for uptime, early connections may not\n newuptime = self.tester.wait_for_result( get_safe_uptime, None, oper=operator.ne)\n elapsed = int(time.time()-start)\n #Check to see if new uptime is at least 'pad' less than before, allowing for some pad\n if (newuptime - (uptime+elapsed)) > pad:\n err_msg = \"Instance uptime does not represent a reboot. Orig:\"+str(uptime)+\\\n \", New:\"+str(newuptime)+\", elapsed:\"+str(elapsed)+\"/\"+str(timeout)\n if elapsed > timeout:\n raise Exception(err_msg)\n else:\n self.debug(err_msg)\n else:\n self.debug(\"Instance uptime indicates a reboot. 
Orig:\"+str(uptime)+\\\n \", New:\"+str(newuptime)+\", elapsed:\"+str(elapsed))\n break\n if checkvolstatus:\n badvols= self.get_unsynced_volumes()\n if badvols != []:\n for vol in badvols:\n msg = msg+\"\\nVolume:\"+vol.id+\" Local Dev:\"+vol.guestdev\n raise Exception(\"Missing volumes post reboot:\"+str(msg)+\"\\n\")\n self.debug(self.id+\" reboot_instance_and_verify Success\")\n\n\n def get_uptime(self):\n if not hasattr(self, 'system_info'):\n self.update_system_info()\n if hasattr(self.system_info, 'system_boot_time'):\n return self._get_uptime_from_system_boot_time()\n elif hasattr(self.system_info, 'system_up_time'):\n return self._get_uptime_from_system_up_time()\n else:\n tb = self.tester.get_traceback()\n raise Exception(str(tb) + '\\nCould not get system boot or up time from system_info')\n\n def _get_uptime_from_system_boot_time(self):\n #11/18/2013, 3:15:39 PM\n if not hasattr(self, 'system_info'):\n self.update_system_info()\n splitdate = self.system_info.system_boot_time.split()\n datestring = splitdate[0]\n timestring = splitdate[1]\n ampm = splitdate[2]\n month, day, year = datestring.replace(',',\"\").split('/')\n hours, minutes, seconds = timestring.split(':')\n if ampm == 'PM':\n hours = int(hours) + 12\n datetimestring = str(year) + \" \" + \\\n str(month) + \" \" + \\\n str(day) + \" \" + \\\n str(hours) + \" \" + \\\n str(minutes) + \" \" + \\\n str(seconds)\n dt = datetime.strptime(datetimestring, \"%Y %m %d %H %M %S\")\n return int(time.time() - time.mktime(dt.timetuple()))\n \n def _get_uptime_from_system_up_time(self):\n #0 Days, 0 Hours, 6 Minutes, 39 Seconds\n if not hasattr(self, 'system_info'):\n self.update_system_info()\n uptime_string = self.system_info.system_up_time\n days = 0\n hours = 0\n minutes = 0\n seconds = 0\n split = uptime_string.split(',')\n for part in split:\n time_string = \"\"\n if re.search('Days', part, re.IGNORECASE):\n time_string = str(part.split()[0]).strip()\n days = int(time_string or 0)\n elif re.search('Hours', part, re.IGNORECASE):\n time_string = str(part.split()[0]).strip()\n hours = int(time_string or 0)\n elif re.search('Minutes', part, re.IGNORECASE):\n time_string = str(part.split()[0]).strip()\n minutes = int(time_string or 0)\n elif re.search('Seconds', part, re.IGNORECASE):\n time_string = str(part.split()[0]).strip()\n seconds = int(time_string or 0)\n self.debug(\"Days:\" +str(days)+', Hours:'+ str(hours) + \", Minutes:\" + str(minutes) + \", Seconds:\" + str(seconds))\n uptime = (days * 86400) + (hours * 3600) + (minutes * 60) + seconds\n return uptime\n\n\n def stop_instance_and_verify(self, timeout=200, state='stopped',\n failstate='terminated', check_vols=True):\n '''\n Attempts to stop instance and verify the state has gone to\n stopped state\n :param timeout; -optional-time to wait on instance to go to state 'state' before failing\n :param state: -optional-the expected state to signify success, default is stopped\n :param failstate: -optional-a state transition that indicates failure, default is terminated\n '''\n self.debug(self.id+\" Attempting to stop instance...\")\n start = time.time()\n elapsed = 0\n self.stop()\n while (elapsed < timeout):\n time.sleep(2)\n self.update()\n if self.state == state:\n break\n if self.state == failstate:\n raise Exception(str(self.id) + \" instance went to state:\" +\n str(self.state) + \" while stopping\")\n elapsed = int(time.time()- start)\n if elapsed % 10 == 0 :\n self.debug(str(self.id) + \" wait for stop, in state:\" +\n str(self.state) + \",time remaining:\" +\n 
str(elapsed) + \"/\" + str(timeout) )\n if self.state != state:\n raise Exception(self.id + \" state: \" + str(self.state) +\n \" expected:\" + str(state) +\n \", after elapsed:\" + str(elapsed))\n if check_vols:\n for volume in self.attached_vols:\n volume.update\n if volume.status != 'in-use':\n raise Exception(str(self.id) + ', Volume ' +\n str(volume.id) + ':' + str(volume.status)\n + ' state did not remain in-use '\n 'during stop')\n self.debug(self.id + \" stop_instance_and_verify Success\")\n\n\n def start_instance_and_verify(self, timeout=300, state = 'running',\n failstates=['terminated'], failfasttime=30,\n connect=True, checkvolstatus=True):\n '''\n Attempts to start instance and verify state, and reconnects ssh session\n :param timeout: -optional-time to wait on instance to go to state\n 'state' before failing\n :param state: -optional-the expected state to signify success,\n default is running\n :param failstate: -optional-a state transition that indicates failure,\n default is terminated\n :param connect: -optional - boolean to indicate whether an ssh\n session should be established once the expected state\n has been reached\n :param checkvolstatus: -optional -boolean to be used to check volume\n status post start up\n '''\n self.debug(self.id+\" Attempting to start instance...\")\n if checkvolstatus:\n for volume in self.attached_vols:\n volume.update\n if checkvolstatus:\n if volume.status != 'in-use':\n raise Exception(str(self.id) + ', Volume ' + str(volume.id) + ':' + str(volume.status)\n + ' state did not remain in-use during stop' )\n self.debug(\"\\n\"+ str(self.id) + \": Printing Instance 'attached_vol' list:\\n\")\n self.tester.show_volumes(self.attached_vols)\n msg=\"\"\n start = time.time()\n elapsed = 0\n self.update()\n #Add fail fast states...\n if self.state == 'stopped':\n failstates.extend(['stopped','stopping'])\n self.start()\n\n while (elapsed < timeout):\n elapsed = int(time.time()- start)\n self.update()\n self.debug(str(self.id) + \" wait for start, in state:\" +\n str(self.state) + \",time remaining:\" + str(elapsed) +\n \"/\"+str(timeout) )\n if self.state == state:\n break\n if elapsed >= failfasttime:\n for failstate in failstates:\n if self.state == failstate:\n raise Exception(str(self.id) +\n \" instance went to state:\" +\n str(self.state) + \" while starting\")\n time.sleep(10)\n if self.state != state:\n raise Exception(self.id + \" not in \" + str(state) +\n \" state after elapsed:\" + str(elapsed))\n else:\n self.debug(self.id + \" went to state:\" + str(state))\n if connect:\n self.connect_to_instance(timeout=timeout)\n if checkvolstatus:\n badvols= self.get_unsynced_volumes(check_md5=True)\n if badvols != []:\n for vol in badvols:\n msg = msg + \"\\nVolume:\" + vol.id + \" Local Dev:\" +\\\n vol.guestdev\n raise Exception(\"Missing volumes post reboot:\" + str(msg) +\n \"\\n\")\n self.debug(self.id+\" start_instance_and_verify Success\")\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
] | true |
821 |
2b7d9ded82fa980eeae06beb2d84d89612d53df1
|
import SimpleITK as sitk
import numpy as np
from sklearn.ensemble import RandomForestClassifier
# # Estimation function # #
# --------------------------- #
# Linear registration function
# --------------------------- #
# --- Input --- #
# im_ref : The common (fixed) image [sitk-image]
# im_mov : The group (moving) image [sitk-image]
# mov_mask : Mask of the moving (group) image, default is None [sitk-image]
# show_parameters : If you want to see the parameters, false by default [boolean]
# --- Output --- #
# lin_xfm : The configured linear registration method [sitk.ImageRegistrationMethod]
def est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):
initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov, sitk.ScaleSkewVersor3DTransform(),
sitk.CenteredTransformInitializerFilter.MOMENTS)
# Initialize registration
lin_transformation = sitk.ImageRegistrationMethod()
# Set metrics
lin_transformation.SetMetricAsMeanSquares()
lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)
lin_transformation.SetMetricSamplingPercentage(0.01)
# Set mask
    if mov_mask is not None:
lin_transformation.SetMetricMovingMask(mov_mask)
# Gradient Descent optimizer
lin_transformation.SetOptimizerAsGradientDescent(learningRate=1, numberOfIterations=400,
convergenceMinimumValue=1e-6, convergenceWindowSize=10)
lin_transformation.SetOptimizerScalesFromPhysicalShift()
# Set the initial transformation
lin_transformation.SetInitialTransform(initial_transform)
# Switching to preferred variable
lin_xfm = lin_transformation
if show_parameters:
print(lin_xfm)
return lin_xfm
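# Hypothetical usage sketch for the linear registration: file names are placeholders and
# the helper name is illustrative. est_lin_transf only configures the registration; the
# transform itself is obtained by executing it through apply_transf (defined further below).
def _example_linear_registration():
    fixed = sitk.ReadImage("common_ct.nii.gz", sitk.sitkFloat32)    # placeholder path
    moving = sitk.ReadImage("group_ct.nii.gz", sitk.sitkFloat32)    # placeholder path
    reg_method = est_lin_transf(fixed, moving)                      # configured ImageRegistrationMethod
    transform = apply_transf(fixed, moving, reg_method)             # estimated sitk.Transform
    # Resample the moving image onto the fixed image grid with the estimated transform
    registered = sitk.Resample(moving, fixed, transform, sitk.sitkLinear, 0.0, moving.GetPixelID())
    sitk.WriteImage(registered, "group_ct_lin_registered.nii.gz")   # placeholder path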
# # Estimation function # #
# --------------------------- #
# Non-linear 'Demons' registration function
# --------------------------- #
# --- Input --- #
# im_ref : The common (fixed) image [sitk-image]
# fixed_mask : The mask of the common image, default is None [sitk-image]
# show_parameters : If you want to see the parameters, false by default [boolean]
# --- Output --- #
# nl_xfm : The configured 'Demons' registration method [sitk.ImageRegistrationMethod]
def est_nl_transf(im_ref, fixed_mask=None, show_parameters=False):
# Initialize the registration
reg_method = sitk.ImageRegistrationMethod()
# Create initial identity transformation.
transform_to_displacement_field_filter = sitk.TransformToDisplacementFieldFilter()
transform_to_displacement_field_filter.SetReferenceImage(im_ref)
initial_transform = sitk.DisplacementFieldTransform(
transform_to_displacement_field_filter.Execute(sitk.Transform()))
# Regularization. The update field refers to fluid regularization; the total field to elastic regularization.
initial_transform.SetSmoothingGaussianOnUpdate(varianceForUpdateField=0, varianceForTotalField=1.5)
# Set the initial transformation
reg_method.SetInitialTransform(initial_transform)
# Set Demons registration
reg_method.SetMetricAsDemons(intensityDifferenceThreshold=0.001)
# Evaluate the metrics only in the mask
if fixed_mask is not None:
reg_method.SetMetricFixedMask(fixed_mask)
# Set a linear interpolator
reg_method.SetInterpolator(sitk.sitkLinear)
# Set a gradient descent optimizer
reg_method.SetOptimizerAsGradientDescent(learningRate=1.0, numberOfIterations=10, convergenceMinimumValue=1e-6,
convergenceWindowSize=10)
reg_method.SetOptimizerScalesFromPhysicalShift()
# Switching to the preferred variable
nl_xfm = reg_method
if show_parameters:
print(nl_xfm)
return nl_xfm
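# Hypothetical usage sketch for the 'Demons' registration: note that est_nl_transf never
# sees the moving image; it is only supplied when the configured method is executed via
# apply_transf. File names and the helper name are placeholders.
def _example_demons_registration():
    fixed = sitk.ReadImage("common_ct.nii.gz", sitk.sitkFloat32)    # placeholder path
    moving = sitk.ReadImage("group_ct.nii.gz", sitk.sitkFloat32)    # placeholder path
    reg_method = est_nl_transf(fixed)                               # configured Demons registration
    displacement = apply_transf(fixed, moving, reg_method)          # estimated displacement field transform
    warped = sitk.Resample(moving, fixed, displacement, sitk.sitkLinear, 0.0, moving.GetPixelID())
    sitk.WriteImage(warped, "group_ct_nl_registered.nii.gz")        # placeholder path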
# # Application function # #
# --------------------------- #
# Executes either the linear or the non-linear function
# --------------------------- #
# --- Input --- #
# im_ref : The common (fixed) image [sitk-image]
# im_mov : The group (moving) image [sitk-image]
# trafo : The configured registration method to execute [sitk.ImageRegistrationMethod]
# show_parameters : If you want to see the parameters, false by default [boolean]
# --- Output --- #
# transf : The estimated transformation [itk.simple.Transform]
def apply_transf(im_ref, im_mov, trafo, show_parameters=False):
# Perform registration (Executes it)
transf = trafo.Execute(sitk.Cast(im_ref, sitk.sitkFloat32), sitk.Cast(im_mov, sitk.sitkFloat32))
if show_parameters:
print(transf)
print("--------")
print("Optimizer stop condition: {0}".format(trafo.GetOptimizerStopConditionDescription()))
print("Number of iterations: {0}".format(trafo.GetOptimizerIteration()))
print("--------")
return transf
# # Atlas segmentation function # #
# --------------------------- #
# Atlas-based segmentation using the CT images in 'ct_list'
# and corresponding segmentation masks from 'seg_list'.
# After that, majority voting to return a segmentation mask.
# --------------------------- #
# --- Input --- #
# common_img : The chosen COMMON image [sitk-image]
# ct_list : List of GROUP images [list]
# seg_list : List of GROUP masks [list]
# --- Output --- #
# segmented_array : The segmentation as an array [numpy.ndarray]
def seg_atlas(common_img, ct_list, seg_list):
# Creating the necessary lists
seg = []
image_list = []
# # REGISTRATION # #
for i in range(len(ct_list)):
# Adjusting the settings and applying
trafo_settings = est_lin_transf(common_img, ct_list[i], mov_mask=seg_list[i], show_parameters=False)
final_trafo = apply_transf(common_img, ct_list[i], trafo_settings)
        # Resample the mask image into the common image grid using the estimated transform
resampler = sitk.ResampleImageFilter()
resampler.SetReferenceImage(common_img)
resampler.SetInterpolator(sitk.sitkLinear)
resampler.SetTransform(final_trafo)
resampled_mask = resampler.Execute(seg_list[i])
resampled_mask_data = sitk.GetArrayFromImage(resampled_mask)
seg.append(resampled_mask_data)
# # MAJORITY VOTING # #
for i in range(len(seg)):
for j in range(i + 1, len(seg)):
arr1 = np.transpose(np.nonzero(seg[i]))
arr2 = np.transpose(np.nonzero(seg[j]))
# Filling two lists
arr1list = [tuple(e) for e in arr1.tolist()]
arr2list = [tuple(e) for e in arr2.tolist()]
# Sorting both lists
arr1list.sort()
arr2list.sort()
# Creating necessary list & sorting
intersections = list(set(arr1list).intersection(arr2list))
intersections.sort()
image_list.append(intersections)
    # Union of the pairwise intersections: voxels where at least two of the three masks agree
    # (the indexing below assumes exactly three atlas segmentations)
intersection_list = list(set(image_list[0]) | set(image_list[1]) | set(image_list[2]))
# Sorting the list
intersection_list.sort()
# Fetches array from image
image_array = sitk.GetArrayFromImage(common_img)
# Creates an array for the points and fills it using indexes
segmented_array = np.zeros(shape=image_array.shape, dtype=np.uint8)
for x, y, z in intersection_list:
segmented_array[x, y, z] = 1
return segmented_array
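# Hypothetical usage sketch for the atlas segmentation: seg_atlas expects exactly three
# atlas CT/mask pairs, because the voting step indexes image_list[0..2]. File names and
# the helper name are placeholders.
def _example_atlas_segmentation():
    common_img = sitk.ReadImage("common_ct.nii.gz", sitk.sitkFloat32)   # placeholder path
    ct_list = [sitk.ReadImage("group_ct_{}.nii.gz".format(i), sitk.sitkFloat32) for i in range(1, 4)]
    seg_list = [sitk.ReadImage("group_mask_{}.nii.gz".format(i)) for i in range(1, 4)]
    segmented_array = seg_atlas(common_img, ct_list, seg_list)
    # Wrap the voted array back into an image carrying the common image's geometry
    segmented_img = sitk.GetImageFromArray(segmented_array)
    segmented_img.CopyInformation(common_img)
    sitk.WriteImage(segmented_img, "atlas_segmentation.nii.gz")         # placeholder path
    return segmented_array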
# # Similarity function # #
# --------------------------- #
# Calculates the following distances between images:
# 1. Jaccard coef.
# 2. Dice coef.
# 3. Hausdorff distance
# --------------------------- #
# --- Input --- #
# mask_img : The mask image [sitk-image]
# seg_img : The segmented image [sitk-image]
# --- Output --- #
# None
def distances(mask_img, seg_img):
# Creating the necessary filters
hausdorff = sitk.HausdorffDistanceImageFilter()
overlap = sitk.LabelOverlapMeasuresImageFilter()
# Execute filters
hausdorff.Execute(mask_img, seg_img)
overlap.Execute(mask_img, seg_img)
# Fetching the distances and appending to distance list
# Jaccard coef.
jaccard = overlap.GetJaccardCoefficient()
# Dice coef.
dice = overlap.GetDiceCoefficient()
# Hausdorff distance
hausdorff_distance = hausdorff.GetHausdorffDistance()
# Printing out the distances for user
print('The Hausdorff distance: {}'.format(
hausdorff_distance))
print('The Dice coefficient: {}'.format(dice))
print('The Jaccard coefficient: {}'.format(jaccard))
return None
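# Hypothetical usage sketch for the similarity metrics: the overlap and Hausdorff filters
# expect two label images of the same pixel type and geometry, so the array produced by
# seg_atlas is wrapped into an image first. The ground-truth mask path is a placeholder.
def _example_distances(segmented_array):
    gt_mask = sitk.ReadImage("common_mask.nii.gz", sitk.sitkUInt8)   # placeholder path
    seg_img = sitk.GetImageFromArray(segmented_array)                # uint8 array from seg_atlas
    seg_img.CopyInformation(gt_mask)                                 # align origin/spacing/direction
    distances(gt_mask, seg_img)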
# # Classifier Function # #
# --------------------------- #
# Trains a random forest classifier on the axial slices of the input CT images,
# using label vectors that indicate whether each slice contains
# the pubic symphysis. The labels are binary.
# --------------------------- #
# --- Input --- #
# slice_list : List of CT images whose axial slices are used as training samples [list]
# vector_list : List of vectors with binary labels [list]
# --- Output --- #
# trained_forest : Trained random forest classifier [sklearn.ensemble.forest.RandomForestClassifier]
def train_classifier(slice_list, vector_list):
# Creating necessary list
x_train_list = []
# Reading in input data
for image in slice_list:
# Fetching arrays
image_array = sitk.GetArrayFromImage(image)
# Resizing
image_array.resize((512, 512, 512))
for z in range(image_array.shape[2]):
x_train_list.append(image_array[:, :, z].flatten())
x_train = np.asarray(x_train_list, dtype=np.uint8)
# Reading in training labels
y_train = None
for i in range(0, len(vector_list)):
if i == 0:
y_train = vector_list[i]
else:
y_train = np.concatenate([y_train, vector_list[i]])
# Train classifier
trained_forest = RandomForestClassifier(n_estimators=150)
trained_forest.fit(x_train, y_train)
return trained_forest
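# Hypothetical usage sketch for training: each label vector holds one binary entry per
# axial slice of the resized (512x512x512) volume, i.e. 512 entries per image, so the
# concatenated labels line up with x_train. Paths and the positive slice range are placeholders.
def _example_train_classifier():
    images = [sitk.ReadImage("group_ct_{}.nii.gz".format(i)) for i in range(1, 4)]  # placeholder paths
    vectors = []
    for _ in images:
        labels = np.zeros(512, dtype=np.uint8)
        labels[200:230] = 1      # placeholder range assumed to contain the pubic symphysis
        vectors.append(labels)
    return train_classifier(images, vectors)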
# # Classifier Function # #
# --------------------------- #
# Uses a trained random forest classifier on a CT image and prints
# which axial slice has the highest probability of containing the pubic symphysis.
# --------------------------- #
# --- Input --- #
# ct_image : The CT image whose axial slices are classified [sitk-image]
# classifier : Trained random forest classifier [sklearn.ensemble.forest.RandomForestClassifier]
# --- Output --- #
# None
def slice_probability(ct_image, classifier):
# Creating necessary lists
test_list = []
max_list = []
# Convert image to numpy array & resize
im_array = sitk.GetArrayFromImage(ct_image)
im_array.resize((512, 512, 512))
for z in range(im_array.shape[2]):
test_list.append(im_array[:, :, z].flatten())
test_array = np.asarray(test_list, dtype=np.uint8)
# Predict probabilities for each slice
probabilities = classifier.predict_proba(test_array)
    # Highest probability of the positive class across all slices
    # (named max_prob so the Python builtin max() is not shadowed)
    max_prob = np.amax(probabilities, axis=0)[1]
    for i, prob in enumerate(probabilities):
        if prob[1] == max_prob:
            max_list.append(i)
    # Print result to user
    if len(max_list) == 1:
        print("Slice {} has highest probability which is: {}".format(max_list[0], max_prob))
    else:
        print("Slices {} have the highest probability which is: {}".format(max_list, max_prob))
return None
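# Hypothetical usage sketch for prediction: reuses the example training helper above and a
# placeholder CT path.
def _example_slice_probability():
    forest = _example_train_classifier()
    ct_image = sitk.ReadImage("common_ct.nii.gz")    # placeholder path
    slice_probability(ct_image, forest)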
|
[
"import SimpleITK as sitk\r\nimport numpy as np\r\nfrom sklearn.ensemble import RandomForestClassifier\r\n\r\n\r\n# # Estimation function # #\r\n# --------------------------- #\r\n# Linear registration function\r\n# --------------------------- #\r\n\r\n# --- Input --- #\r\n# im_ref : The common image [numpy.ndarray]\r\n# im_mov : The group image [numpy.ndarray]\r\n# mov_mask : List of GROUP masks [list]\r\n# show_parameters : If you want to see the parameters, false by default [boolean]\r\n\r\n\r\n# --- Output --- #\r\n# lin_xfm : Estimated transformation parameters [itk.simple.Transform]\r\n\r\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\r\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov, sitk.ScaleSkewVersor3DTransform(),\r\n sitk.CenteredTransformInitializerFilter.MOMENTS)\r\n\r\n # Initialize registration\r\n lin_transformation = sitk.ImageRegistrationMethod()\r\n\r\n # Set metrics\r\n lin_transformation.SetMetricAsMeanSquares()\r\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\r\n lin_transformation.SetMetricSamplingPercentage(0.01)\r\n\r\n # Set mask\r\n if mov_mask:\r\n lin_transformation.SetMetricMovingMask(mov_mask)\r\n\r\n # Gradient Descent optimizer\r\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1, numberOfIterations=400,\r\n convergenceMinimumValue=1e-6, convergenceWindowSize=10)\r\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\r\n\r\n # Set the initial transformation\r\n lin_transformation.SetInitialTransform(initial_transform)\r\n\r\n # Switching to preferred variable\r\n lin_xfm = lin_transformation\r\n\r\n if show_parameters:\r\n print(lin_xfm)\r\n\r\n return lin_xfm\r\n\r\n\r\n# # Estimation function # #\r\n# --------------------------- #\r\n# Non-linear 'Demons' registration function\r\n# --------------------------- #\r\n\r\n# --- Input --- #\r\n# im_ref : The common image [numpy.ndarray]\r\n# fixed_mask : The mask of common image, default is None [numpy.ndarray]\r\n# show_parameters : If you want to see the parameters, false by default [boolean]\r\n\r\n\r\n# --- Output --- #\r\n# nl_xfm : Estimated transformation parameters [itk.simple.Transform]\r\n\r\ndef est_nl_transf(im_ref, fixed_mask=None, show_parameters=False):\r\n # Initialize the registration\r\n reg_method = sitk.ImageRegistrationMethod()\r\n\r\n # Create initial identity transformation.\r\n transform_to_displacement_field_filter = sitk.TransformToDisplacementFieldFilter()\r\n transform_to_displacement_field_filter.SetReferenceImage(im_ref)\r\n initial_transform = sitk.DisplacementFieldTransform(\r\n transform_to_displacement_field_filter.Execute(sitk.Transform()))\r\n\r\n # Regularization. 
The update field refers to fluid regularization; the total field to elastic regularization.\r\n initial_transform.SetSmoothingGaussianOnUpdate(varianceForUpdateField=0, varianceForTotalField=1.5)\r\n\r\n # Set the initial transformation\r\n reg_method.SetInitialTransform(initial_transform)\r\n\r\n # Set Demons registration\r\n reg_method.SetMetricAsDemons(intensityDifferenceThreshold=0.001)\r\n\r\n # Evaluate the metrics only in the mask\r\n if fixed_mask is not None:\r\n reg_method.SetMetricFixedMask(fixed_mask)\r\n\r\n # Set a linear interpolator\r\n reg_method.SetInterpolator(sitk.sitkLinear)\r\n\r\n # Set a gradient descent optimizer\r\n reg_method.SetOptimizerAsGradientDescent(learningRate=1.0, numberOfIterations=10, convergenceMinimumValue=1e-6,\r\n convergenceWindowSize=10)\r\n reg_method.SetOptimizerScalesFromPhysicalShift()\r\n\r\n # Switching to the preferred variable\r\n nl_xfm = reg_method\r\n\r\n if show_parameters:\r\n print(nl_xfm)\r\n\r\n return nl_xfm\r\n\r\n# # Application function # #\r\n# --------------------------- #\r\n# Executes either the linear or the non-linear function\r\n# --------------------------- #\r\n\r\n# --- Input --- #\r\n# im_ref : The common image [numpy.ndarray]\r\n# im_mov : The group image [numpy.ndarray]\r\n# trafo : The chosen transformation [numpy.ndarray]\r\n# show_parameters : If you want to see the parameters, false by default [boolean]\r\n\r\n\r\n# --- Output --- #\r\n# final_image : Returns the registered image [numpy.ndarray]\r\n\r\ndef apply_transf(im_ref, im_mov, trafo, show_parameters=False):\r\n # Perform registration (Executes it)\r\n transf = trafo.Execute(sitk.Cast(im_ref, sitk.sitkFloat32), sitk.Cast(im_mov, sitk.sitkFloat32))\r\n\r\n if show_parameters:\r\n print(transf)\r\n print(\"--------\")\r\n print(\"Optimizer stop condition: {0}\".format(trafo.GetOptimizerStopConditionDescription()))\r\n print(\"Number of iterations: {0}\".format(trafo.GetOptimizerIteration()))\r\n print(\"--------\")\r\n\r\n return transf\r\n\r\n\r\n# # Atlas segmentation function # #\r\n# --------------------------- #\r\n# Atlas-based segmentation using the CT images in 'ct_list'\r\n# and corresponding segmentation masks from 'seg_list'.\r\n# After that, majority voting to return a segmentation mask.\r\n# --------------------------- #\r\n\r\n# --- Input --- #\r\n# common_img : The chosen COMMON image [sitk-image]\r\n# ct_list : List of GROUP images [list]\r\n# seg_list : List of GROUP masks [list]\r\n\r\n# --- Output --- #\r\n# segmented_array : The segmentation as an array [numpy.ndarray]\r\n\r\ndef seg_atlas(common_img, ct_list, seg_list):\r\n # Creating the necessary lists\r\n seg = []\r\n image_list = []\r\n\r\n # # REGISTRATION # #\r\n for i in range(len(ct_list)):\r\n # Adjusting the settings and applying\r\n trafo_settings = est_lin_transf(common_img, ct_list[i], mov_mask=seg_list[i], show_parameters=False)\r\n final_trafo = apply_transf(common_img, ct_list[i], trafo_settings)\r\n\r\n # Perform registration on mask image\r\n resampler = sitk.ResampleImageFilter()\r\n resampler.SetReferenceImage(common_img)\r\n resampler.SetInterpolator(sitk.sitkLinear)\r\n\r\n resampler.SetTransform(final_trafo)\r\n resampled_mask = resampler.Execute(seg_list[i])\r\n\r\n resampled_mask_data = sitk.GetArrayFromImage(resampled_mask)\r\n seg.append(resampled_mask_data)\r\n\r\n # # MAJORITY VOTING # #\r\n for i in range(len(seg)):\r\n for j in range(i + 1, len(seg)):\r\n arr1 = np.transpose(np.nonzero(seg[i]))\r\n arr2 = np.transpose(np.nonzero(seg[j]))\r\n\r\n # 
Filling two lists\r\n arr1list = [tuple(e) for e in arr1.tolist()]\r\n arr2list = [tuple(e) for e in arr2.tolist()]\r\n\r\n # Sorting both lists\r\n arr1list.sort()\r\n arr2list.sort()\r\n\r\n # Creating necessary list & sorting\r\n intersections = list(set(arr1list).intersection(arr2list))\r\n intersections.sort()\r\n\r\n image_list.append(intersections)\r\n # Creating a list which contains the indexes of intersecting voxels\r\n intersection_list = list(set(image_list[0]) | set(image_list[1]) | set(image_list[2]))\r\n\r\n # Sorting the list\r\n intersection_list.sort()\r\n\r\n # Fetches array from image\r\n image_array = sitk.GetArrayFromImage(common_img)\r\n\r\n # Creates an array for the points and fills it using indexes\r\n segmented_array = np.zeros(shape=image_array.shape, dtype=np.uint8)\r\n for x, y, z in intersection_list:\r\n segmented_array[x, y, z] = 1\r\n\r\n return segmented_array\r\n\r\n\r\n# # Similarity function # #\r\n# --------------------------- #\r\n# Calculates the following distances between images:\r\n# 1. Jaccard coef.\r\n# 2. Dice coef.\r\n# 3. Hausdorff distance\r\n# --------------------------- #\r\n\r\n# --- Input --- #\r\n# mask_img : The mask image [sikt-image]\r\n# seg_img: The segmented image [sikt-image]\r\n\r\n# --- Output --- #\r\n# None\r\n\r\ndef distances(mask_img, seg_img):\r\n # Creating the necessary filters\r\n hausdorff = sitk.HausdorffDistanceImageFilter()\r\n overlap = sitk.LabelOverlapMeasuresImageFilter()\r\n\r\n # Execute filters\r\n hausdorff.Execute(mask_img, seg_img)\r\n overlap.Execute(mask_img, seg_img)\r\n\r\n # Fetching the distances and appending to distance list\r\n # Jaccard coef.\r\n jaccard = overlap.GetJaccardCoefficient()\r\n\r\n # Dice coef.\r\n dice = overlap.GetDiceCoefficient()\r\n\r\n # Hausdorff distance\r\n hausdorff_distance = hausdorff.GetHausdorffDistance()\r\n\r\n # Printing out the distances for user\r\n print('The Hausdorff distance: {}'.format(\r\n hausdorff_distance))\r\n print('The Dice coefficient: {}'.format(dice))\r\n print('The Jaccard coefficient: {}'.format(jaccard))\r\n\r\n return None\r\n\r\n\r\n# # Classifier Function # #\r\n# --------------------------- #\r\n# Trains a random forest classifier by reading 2d images and comparing\r\n# them to a vector which has labels that correspond to if it contains\r\n# the pubic symphysis. 
The labels are binary.\r\n# --------------------------- #\r\n\r\n# --- Input --- #\r\n# slice_list : List of 2D slice images [list]\r\n# vector_list : List of vectors with binary labels [list]\r\n\r\n# --- Output --- #\r\n# trained_forest : Trained random forest classifier [sklearn.ensemble.forest.RandomForestClassifier]\r\n\r\ndef train_classifier(slice_list, vector_list):\r\n # Creating necessary list\r\n x_train_list = []\r\n\r\n # Reading in input data\r\n for image in slice_list:\r\n\r\n # Fetching arrays\r\n image_array = sitk.GetArrayFromImage(image)\r\n\r\n # Resizing\r\n image_array.resize((512, 512, 512))\r\n\r\n for z in range(image_array.shape[2]):\r\n x_train_list.append(image_array[:, :, z].flatten())\r\n x_train = np.asarray(x_train_list, dtype=np.uint8)\r\n\r\n # Reading in training labels\r\n y_train = None\r\n for i in range(0, len(vector_list)):\r\n if i == 0:\r\n y_train = vector_list[i]\r\n else:\r\n y_train = np.concatenate([y_train, vector_list[i]])\r\n\r\n # Train classifier\r\n trained_forest = RandomForestClassifier(n_estimators=150)\r\n trained_forest.fit(x_train, y_train)\r\n\r\n return trained_forest\r\n\r\n\r\n# # Classifier Function # #\r\n# --------------------------- #\r\n# Utilizes a trained random forest classifier by reading CT image and prints\r\n# which slice has the highest probability of containing the pubic symphysis.\r\n# --------------------------- #\r\n\r\n# --- Input --- #\r\n# ct_image : List of 2D axial slice images [list]\r\n# classifier : Trained random forest classifier [sklearn.ensemble.forest.RandomForestClassifier]\r\n\r\n# --- Output --- #\r\n# None\r\n\r\ndef slice_probability(ct_image, classifier):\r\n # Creating necessary lists\r\n test_list = []\r\n max_list = []\r\n\r\n # Convert image to numpy array & resize\r\n im_array = sitk.GetArrayFromImage(ct_image)\r\n im_array.resize((512, 512, 512))\r\n\r\n for z in range(im_array.shape[2]):\r\n test_list.append(im_array[:, :, z].flatten())\r\n test_array = np.asarray(test_list, dtype=np.uint8)\r\n\r\n # Predict probabilities for each slice\r\n probabilities = classifier.predict_proba(test_array)\r\n\r\n # Fetching array with maximum probabilities\r\n max = np.amax(probabilities, axis=0)[1]\r\n\r\n for i, prob in enumerate(probabilities):\r\n if prob[1] == max:\r\n max_list.append(i)\r\n\r\n # Print result to user\r\n if len(max_list) == 1:\r\n print(\"Slice {} has highest probability which is: {}\".format(max_list[0], max))\r\n else:\r\n print(\"Slices {} have the highest probability which is: {}\".format(max_list, max))\r\n\r\n return None\r\n",
"import SimpleITK as sitk\nimport numpy as np\nfrom sklearn.ensemble import RandomForestClassifier\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\ndef est_nl_transf(im_ref, fixed_mask=None, show_parameters=False):\n reg_method = sitk.ImageRegistrationMethod()\n transform_to_displacement_field_filter = (sitk.\n TransformToDisplacementFieldFilter())\n transform_to_displacement_field_filter.SetReferenceImage(im_ref)\n initial_transform = sitk.DisplacementFieldTransform(\n transform_to_displacement_field_filter.Execute(sitk.Transform()))\n initial_transform.SetSmoothingGaussianOnUpdate(varianceForUpdateField=0,\n varianceForTotalField=1.5)\n reg_method.SetInitialTransform(initial_transform)\n reg_method.SetMetricAsDemons(intensityDifferenceThreshold=0.001)\n if fixed_mask is not None:\n reg_method.SetMetricFixedMask(fixed_mask)\n reg_method.SetInterpolator(sitk.sitkLinear)\n reg_method.SetOptimizerAsGradientDescent(learningRate=1.0,\n numberOfIterations=10, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n reg_method.SetOptimizerScalesFromPhysicalShift()\n nl_xfm = reg_method\n if show_parameters:\n print(nl_xfm)\n return nl_xfm\n\n\ndef apply_transf(im_ref, im_mov, trafo, show_parameters=False):\n transf = trafo.Execute(sitk.Cast(im_ref, sitk.sitkFloat32), sitk.Cast(\n im_mov, sitk.sitkFloat32))\n if show_parameters:\n print(transf)\n print('--------')\n print('Optimizer stop condition: {0}'.format(trafo.\n GetOptimizerStopConditionDescription()))\n print('Number of iterations: {0}'.format(trafo.GetOptimizerIteration())\n )\n print('--------')\n return transf\n\n\ndef seg_atlas(common_img, ct_list, seg_list):\n seg = []\n image_list = []\n for i in range(len(ct_list)):\n trafo_settings = est_lin_transf(common_img, ct_list[i], mov_mask=\n seg_list[i], show_parameters=False)\n final_trafo = apply_transf(common_img, ct_list[i], trafo_settings)\n resampler = sitk.ResampleImageFilter()\n resampler.SetReferenceImage(common_img)\n resampler.SetInterpolator(sitk.sitkLinear)\n resampler.SetTransform(final_trafo)\n resampled_mask = resampler.Execute(seg_list[i])\n resampled_mask_data = sitk.GetArrayFromImage(resampled_mask)\n seg.append(resampled_mask_data)\n for i in range(len(seg)):\n for j in range(i + 1, len(seg)):\n arr1 = np.transpose(np.nonzero(seg[i]))\n arr2 = np.transpose(np.nonzero(seg[j]))\n arr1list = [tuple(e) for e in arr1.tolist()]\n arr2list = [tuple(e) for e in arr2.tolist()]\n arr1list.sort()\n arr2list.sort()\n intersections = list(set(arr1list).intersection(arr2list))\n intersections.sort()\n image_list.append(intersections)\n intersection_list = list(set(image_list[0]) | set(image_list[1]) | set(\n image_list[2]))\n 
intersection_list.sort()\n image_array = sitk.GetArrayFromImage(common_img)\n segmented_array = np.zeros(shape=image_array.shape, dtype=np.uint8)\n for x, y, z in intersection_list:\n segmented_array[x, y, z] = 1\n return segmented_array\n\n\ndef distances(mask_img, seg_img):\n hausdorff = sitk.HausdorffDistanceImageFilter()\n overlap = sitk.LabelOverlapMeasuresImageFilter()\n hausdorff.Execute(mask_img, seg_img)\n overlap.Execute(mask_img, seg_img)\n jaccard = overlap.GetJaccardCoefficient()\n dice = overlap.GetDiceCoefficient()\n hausdorff_distance = hausdorff.GetHausdorffDistance()\n print('The Hausdorff distance: {}'.format(hausdorff_distance))\n print('The Dice coefficient: {}'.format(dice))\n print('The Jaccard coefficient: {}'.format(jaccard))\n return None\n\n\ndef train_classifier(slice_list, vector_list):\n x_train_list = []\n for image in slice_list:\n image_array = sitk.GetArrayFromImage(image)\n image_array.resize((512, 512, 512))\n for z in range(image_array.shape[2]):\n x_train_list.append(image_array[:, :, z].flatten())\n x_train = np.asarray(x_train_list, dtype=np.uint8)\n y_train = None\n for i in range(0, len(vector_list)):\n if i == 0:\n y_train = vector_list[i]\n else:\n y_train = np.concatenate([y_train, vector_list[i]])\n trained_forest = RandomForestClassifier(n_estimators=150)\n trained_forest.fit(x_train, y_train)\n return trained_forest\n\n\ndef slice_probability(ct_image, classifier):\n test_list = []\n max_list = []\n im_array = sitk.GetArrayFromImage(ct_image)\n im_array.resize((512, 512, 512))\n for z in range(im_array.shape[2]):\n test_list.append(im_array[:, :, z].flatten())\n test_array = np.asarray(test_list, dtype=np.uint8)\n probabilities = classifier.predict_proba(test_array)\n max = np.amax(probabilities, axis=0)[1]\n for i, prob in enumerate(probabilities):\n if prob[1] == max:\n max_list.append(i)\n if len(max_list) == 1:\n print('Slice {} has highest probability which is: {}'.format(\n max_list[0], max))\n else:\n print('Slices {} have the highest probability which is: {}'.format(\n max_list, max))\n return None\n",
"<import token>\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\ndef est_nl_transf(im_ref, fixed_mask=None, show_parameters=False):\n reg_method = sitk.ImageRegistrationMethod()\n transform_to_displacement_field_filter = (sitk.\n TransformToDisplacementFieldFilter())\n transform_to_displacement_field_filter.SetReferenceImage(im_ref)\n initial_transform = sitk.DisplacementFieldTransform(\n transform_to_displacement_field_filter.Execute(sitk.Transform()))\n initial_transform.SetSmoothingGaussianOnUpdate(varianceForUpdateField=0,\n varianceForTotalField=1.5)\n reg_method.SetInitialTransform(initial_transform)\n reg_method.SetMetricAsDemons(intensityDifferenceThreshold=0.001)\n if fixed_mask is not None:\n reg_method.SetMetricFixedMask(fixed_mask)\n reg_method.SetInterpolator(sitk.sitkLinear)\n reg_method.SetOptimizerAsGradientDescent(learningRate=1.0,\n numberOfIterations=10, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n reg_method.SetOptimizerScalesFromPhysicalShift()\n nl_xfm = reg_method\n if show_parameters:\n print(nl_xfm)\n return nl_xfm\n\n\ndef apply_transf(im_ref, im_mov, trafo, show_parameters=False):\n transf = trafo.Execute(sitk.Cast(im_ref, sitk.sitkFloat32), sitk.Cast(\n im_mov, sitk.sitkFloat32))\n if show_parameters:\n print(transf)\n print('--------')\n print('Optimizer stop condition: {0}'.format(trafo.\n GetOptimizerStopConditionDescription()))\n print('Number of iterations: {0}'.format(trafo.GetOptimizerIteration())\n )\n print('--------')\n return transf\n\n\ndef seg_atlas(common_img, ct_list, seg_list):\n seg = []\n image_list = []\n for i in range(len(ct_list)):\n trafo_settings = est_lin_transf(common_img, ct_list[i], mov_mask=\n seg_list[i], show_parameters=False)\n final_trafo = apply_transf(common_img, ct_list[i], trafo_settings)\n resampler = sitk.ResampleImageFilter()\n resampler.SetReferenceImage(common_img)\n resampler.SetInterpolator(sitk.sitkLinear)\n resampler.SetTransform(final_trafo)\n resampled_mask = resampler.Execute(seg_list[i])\n resampled_mask_data = sitk.GetArrayFromImage(resampled_mask)\n seg.append(resampled_mask_data)\n for i in range(len(seg)):\n for j in range(i + 1, len(seg)):\n arr1 = np.transpose(np.nonzero(seg[i]))\n arr2 = np.transpose(np.nonzero(seg[j]))\n arr1list = [tuple(e) for e in arr1.tolist()]\n arr2list = [tuple(e) for e in arr2.tolist()]\n arr1list.sort()\n arr2list.sort()\n intersections = list(set(arr1list).intersection(arr2list))\n intersections.sort()\n image_list.append(intersections)\n intersection_list = list(set(image_list[0]) | set(image_list[1]) | set(\n image_list[2]))\n intersection_list.sort()\n image_array = sitk.GetArrayFromImage(common_img)\n segmented_array = 
np.zeros(shape=image_array.shape, dtype=np.uint8)\n for x, y, z in intersection_list:\n segmented_array[x, y, z] = 1\n return segmented_array\n\n\ndef distances(mask_img, seg_img):\n hausdorff = sitk.HausdorffDistanceImageFilter()\n overlap = sitk.LabelOverlapMeasuresImageFilter()\n hausdorff.Execute(mask_img, seg_img)\n overlap.Execute(mask_img, seg_img)\n jaccard = overlap.GetJaccardCoefficient()\n dice = overlap.GetDiceCoefficient()\n hausdorff_distance = hausdorff.GetHausdorffDistance()\n print('The Hausdorff distance: {}'.format(hausdorff_distance))\n print('The Dice coefficient: {}'.format(dice))\n print('The Jaccard coefficient: {}'.format(jaccard))\n return None\n\n\ndef train_classifier(slice_list, vector_list):\n x_train_list = []\n for image in slice_list:\n image_array = sitk.GetArrayFromImage(image)\n image_array.resize((512, 512, 512))\n for z in range(image_array.shape[2]):\n x_train_list.append(image_array[:, :, z].flatten())\n x_train = np.asarray(x_train_list, dtype=np.uint8)\n y_train = None\n for i in range(0, len(vector_list)):\n if i == 0:\n y_train = vector_list[i]\n else:\n y_train = np.concatenate([y_train, vector_list[i]])\n trained_forest = RandomForestClassifier(n_estimators=150)\n trained_forest.fit(x_train, y_train)\n return trained_forest\n\n\ndef slice_probability(ct_image, classifier):\n test_list = []\n max_list = []\n im_array = sitk.GetArrayFromImage(ct_image)\n im_array.resize((512, 512, 512))\n for z in range(im_array.shape[2]):\n test_list.append(im_array[:, :, z].flatten())\n test_array = np.asarray(test_list, dtype=np.uint8)\n probabilities = classifier.predict_proba(test_array)\n max = np.amax(probabilities, axis=0)[1]\n for i, prob in enumerate(probabilities):\n if prob[1] == max:\n max_list.append(i)\n if len(max_list) == 1:\n print('Slice {} has highest probability which is: {}'.format(\n max_list[0], max))\n else:\n print('Slices {} have the highest probability which is: {}'.format(\n max_list, max))\n return None\n",
"<import token>\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\n<function token>\n\n\ndef apply_transf(im_ref, im_mov, trafo, show_parameters=False):\n transf = trafo.Execute(sitk.Cast(im_ref, sitk.sitkFloat32), sitk.Cast(\n im_mov, sitk.sitkFloat32))\n if show_parameters:\n print(transf)\n print('--------')\n print('Optimizer stop condition: {0}'.format(trafo.\n GetOptimizerStopConditionDescription()))\n print('Number of iterations: {0}'.format(trafo.GetOptimizerIteration())\n )\n print('--------')\n return transf\n\n\ndef seg_atlas(common_img, ct_list, seg_list):\n seg = []\n image_list = []\n for i in range(len(ct_list)):\n trafo_settings = est_lin_transf(common_img, ct_list[i], mov_mask=\n seg_list[i], show_parameters=False)\n final_trafo = apply_transf(common_img, ct_list[i], trafo_settings)\n resampler = sitk.ResampleImageFilter()\n resampler.SetReferenceImage(common_img)\n resampler.SetInterpolator(sitk.sitkLinear)\n resampler.SetTransform(final_trafo)\n resampled_mask = resampler.Execute(seg_list[i])\n resampled_mask_data = sitk.GetArrayFromImage(resampled_mask)\n seg.append(resampled_mask_data)\n for i in range(len(seg)):\n for j in range(i + 1, len(seg)):\n arr1 = np.transpose(np.nonzero(seg[i]))\n arr2 = np.transpose(np.nonzero(seg[j]))\n arr1list = [tuple(e) for e in arr1.tolist()]\n arr2list = [tuple(e) for e in arr2.tolist()]\n arr1list.sort()\n arr2list.sort()\n intersections = list(set(arr1list).intersection(arr2list))\n intersections.sort()\n image_list.append(intersections)\n intersection_list = list(set(image_list[0]) | set(image_list[1]) | set(\n image_list[2]))\n intersection_list.sort()\n image_array = sitk.GetArrayFromImage(common_img)\n segmented_array = np.zeros(shape=image_array.shape, dtype=np.uint8)\n for x, y, z in intersection_list:\n segmented_array[x, y, z] = 1\n return segmented_array\n\n\ndef distances(mask_img, seg_img):\n hausdorff = sitk.HausdorffDistanceImageFilter()\n overlap = sitk.LabelOverlapMeasuresImageFilter()\n hausdorff.Execute(mask_img, seg_img)\n overlap.Execute(mask_img, seg_img)\n jaccard = overlap.GetJaccardCoefficient()\n dice = overlap.GetDiceCoefficient()\n hausdorff_distance = hausdorff.GetHausdorffDistance()\n print('The Hausdorff distance: {}'.format(hausdorff_distance))\n print('The Dice coefficient: {}'.format(dice))\n print('The Jaccard coefficient: {}'.format(jaccard))\n return None\n\n\ndef train_classifier(slice_list, vector_list):\n x_train_list = []\n for image in slice_list:\n image_array = sitk.GetArrayFromImage(image)\n image_array.resize((512, 512, 512))\n for z in range(image_array.shape[2]):\n x_train_list.append(image_array[:, :, z].flatten())\n x_train = np.asarray(x_train_list, 
dtype=np.uint8)\n y_train = None\n for i in range(0, len(vector_list)):\n if i == 0:\n y_train = vector_list[i]\n else:\n y_train = np.concatenate([y_train, vector_list[i]])\n trained_forest = RandomForestClassifier(n_estimators=150)\n trained_forest.fit(x_train, y_train)\n return trained_forest\n\n\ndef slice_probability(ct_image, classifier):\n test_list = []\n max_list = []\n im_array = sitk.GetArrayFromImage(ct_image)\n im_array.resize((512, 512, 512))\n for z in range(im_array.shape[2]):\n test_list.append(im_array[:, :, z].flatten())\n test_array = np.asarray(test_list, dtype=np.uint8)\n probabilities = classifier.predict_proba(test_array)\n max = np.amax(probabilities, axis=0)[1]\n for i, prob in enumerate(probabilities):\n if prob[1] == max:\n max_list.append(i)\n if len(max_list) == 1:\n print('Slice {} has highest probability which is: {}'.format(\n max_list[0], max))\n else:\n print('Slices {} have the highest probability which is: {}'.format(\n max_list, max))\n return None\n",
"<import token>\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\n<function token>\n\n\ndef apply_transf(im_ref, im_mov, trafo, show_parameters=False):\n transf = trafo.Execute(sitk.Cast(im_ref, sitk.sitkFloat32), sitk.Cast(\n im_mov, sitk.sitkFloat32))\n if show_parameters:\n print(transf)\n print('--------')\n print('Optimizer stop condition: {0}'.format(trafo.\n GetOptimizerStopConditionDescription()))\n print('Number of iterations: {0}'.format(trafo.GetOptimizerIteration())\n )\n print('--------')\n return transf\n\n\n<function token>\n\n\ndef distances(mask_img, seg_img):\n hausdorff = sitk.HausdorffDistanceImageFilter()\n overlap = sitk.LabelOverlapMeasuresImageFilter()\n hausdorff.Execute(mask_img, seg_img)\n overlap.Execute(mask_img, seg_img)\n jaccard = overlap.GetJaccardCoefficient()\n dice = overlap.GetDiceCoefficient()\n hausdorff_distance = hausdorff.GetHausdorffDistance()\n print('The Hausdorff distance: {}'.format(hausdorff_distance))\n print('The Dice coefficient: {}'.format(dice))\n print('The Jaccard coefficient: {}'.format(jaccard))\n return None\n\n\ndef train_classifier(slice_list, vector_list):\n x_train_list = []\n for image in slice_list:\n image_array = sitk.GetArrayFromImage(image)\n image_array.resize((512, 512, 512))\n for z in range(image_array.shape[2]):\n x_train_list.append(image_array[:, :, z].flatten())\n x_train = np.asarray(x_train_list, dtype=np.uint8)\n y_train = None\n for i in range(0, len(vector_list)):\n if i == 0:\n y_train = vector_list[i]\n else:\n y_train = np.concatenate([y_train, vector_list[i]])\n trained_forest = RandomForestClassifier(n_estimators=150)\n trained_forest.fit(x_train, y_train)\n return trained_forest\n\n\ndef slice_probability(ct_image, classifier):\n test_list = []\n max_list = []\n im_array = sitk.GetArrayFromImage(ct_image)\n im_array.resize((512, 512, 512))\n for z in range(im_array.shape[2]):\n test_list.append(im_array[:, :, z].flatten())\n test_array = np.asarray(test_list, dtype=np.uint8)\n probabilities = classifier.predict_proba(test_array)\n max = np.amax(probabilities, axis=0)[1]\n for i, prob in enumerate(probabilities):\n if prob[1] == max:\n max_list.append(i)\n if len(max_list) == 1:\n print('Slice {} has highest probability which is: {}'.format(\n max_list[0], max))\n else:\n print('Slices {} have the highest probability which is: {}'.format(\n max_list, max))\n return None\n",
"<import token>\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef distances(mask_img, seg_img):\n hausdorff = sitk.HausdorffDistanceImageFilter()\n overlap = sitk.LabelOverlapMeasuresImageFilter()\n hausdorff.Execute(mask_img, seg_img)\n overlap.Execute(mask_img, seg_img)\n jaccard = overlap.GetJaccardCoefficient()\n dice = overlap.GetDiceCoefficient()\n hausdorff_distance = hausdorff.GetHausdorffDistance()\n print('The Hausdorff distance: {}'.format(hausdorff_distance))\n print('The Dice coefficient: {}'.format(dice))\n print('The Jaccard coefficient: {}'.format(jaccard))\n return None\n\n\ndef train_classifier(slice_list, vector_list):\n x_train_list = []\n for image in slice_list:\n image_array = sitk.GetArrayFromImage(image)\n image_array.resize((512, 512, 512))\n for z in range(image_array.shape[2]):\n x_train_list.append(image_array[:, :, z].flatten())\n x_train = np.asarray(x_train_list, dtype=np.uint8)\n y_train = None\n for i in range(0, len(vector_list)):\n if i == 0:\n y_train = vector_list[i]\n else:\n y_train = np.concatenate([y_train, vector_list[i]])\n trained_forest = RandomForestClassifier(n_estimators=150)\n trained_forest.fit(x_train, y_train)\n return trained_forest\n\n\ndef slice_probability(ct_image, classifier):\n test_list = []\n max_list = []\n im_array = sitk.GetArrayFromImage(ct_image)\n im_array.resize((512, 512, 512))\n for z in range(im_array.shape[2]):\n test_list.append(im_array[:, :, z].flatten())\n test_array = np.asarray(test_list, dtype=np.uint8)\n probabilities = classifier.predict_proba(test_array)\n max = np.amax(probabilities, axis=0)[1]\n for i, prob in enumerate(probabilities):\n if prob[1] == max:\n max_list.append(i)\n if len(max_list) == 1:\n print('Slice {} has highest probability which is: {}'.format(\n max_list[0], max))\n else:\n print('Slices {} have the highest probability which is: {}'.format(\n max_list, max))\n return None\n",
"<import token>\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef train_classifier(slice_list, vector_list):\n x_train_list = []\n for image in slice_list:\n image_array = sitk.GetArrayFromImage(image)\n image_array.resize((512, 512, 512))\n for z in range(image_array.shape[2]):\n x_train_list.append(image_array[:, :, z].flatten())\n x_train = np.asarray(x_train_list, dtype=np.uint8)\n y_train = None\n for i in range(0, len(vector_list)):\n if i == 0:\n y_train = vector_list[i]\n else:\n y_train = np.concatenate([y_train, vector_list[i]])\n trained_forest = RandomForestClassifier(n_estimators=150)\n trained_forest.fit(x_train, y_train)\n return trained_forest\n\n\ndef slice_probability(ct_image, classifier):\n test_list = []\n max_list = []\n im_array = sitk.GetArrayFromImage(ct_image)\n im_array.resize((512, 512, 512))\n for z in range(im_array.shape[2]):\n test_list.append(im_array[:, :, z].flatten())\n test_array = np.asarray(test_list, dtype=np.uint8)\n probabilities = classifier.predict_proba(test_array)\n max = np.amax(probabilities, axis=0)[1]\n for i, prob in enumerate(probabilities):\n if prob[1] == max:\n max_list.append(i)\n if len(max_list) == 1:\n print('Slice {} has highest probability which is: {}'.format(\n max_list[0], max))\n else:\n print('Slices {} have the highest probability which is: {}'.format(\n max_list, max))\n return None\n",
"<import token>\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef slice_probability(ct_image, classifier):\n test_list = []\n max_list = []\n im_array = sitk.GetArrayFromImage(ct_image)\n im_array.resize((512, 512, 512))\n for z in range(im_array.shape[2]):\n test_list.append(im_array[:, :, z].flatten())\n test_array = np.asarray(test_list, dtype=np.uint8)\n probabilities = classifier.predict_proba(test_array)\n max = np.amax(probabilities, axis=0)[1]\n for i, prob in enumerate(probabilities):\n if prob[1] == max:\n max_list.append(i)\n if len(max_list) == 1:\n print('Slice {} has highest probability which is: {}'.format(\n max_list[0], max))\n else:\n print('Slices {} have the highest probability which is: {}'.format(\n max_list, max))\n return None\n",
"<import token>\n\n\ndef est_lin_transf(im_ref, im_mov, mov_mask=None, show_parameters=False):\n initial_transform = sitk.CenteredTransformInitializer(im_ref, im_mov,\n sitk.ScaleSkewVersor3DTransform(), sitk.\n CenteredTransformInitializerFilter.MOMENTS)\n lin_transformation = sitk.ImageRegistrationMethod()\n lin_transformation.SetMetricAsMeanSquares()\n lin_transformation.SetMetricSamplingStrategy(lin_transformation.RANDOM)\n lin_transformation.SetMetricSamplingPercentage(0.01)\n if mov_mask:\n lin_transformation.SetMetricMovingMask(mov_mask)\n lin_transformation.SetOptimizerAsGradientDescent(learningRate=1,\n numberOfIterations=400, convergenceMinimumValue=1e-06,\n convergenceWindowSize=10)\n lin_transformation.SetOptimizerScalesFromPhysicalShift()\n lin_transformation.SetInitialTransform(initial_transform)\n lin_xfm = lin_transformation\n if show_parameters:\n print(lin_xfm)\n return lin_xfm\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
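The seg_atlas routine in the record above fuses the propagated atlas masks by intersecting Python sets of voxel coordinates, which works but is slow and hard to follow. Below is a minimal NumPy sketch of the same fusion idea, assuming the resampled masks are already available as equally shaped binary arrays; the names fuse_masks, masks and min_votes are illustrative, not part of the original code.

import numpy as np

def fuse_masks(masks, min_votes=2):
    """Majority-vote fusion of binary atlas masks.

    masks: list of equally shaped uint8/bool arrays (1 = organ voxel).
    Returns a uint8 array with 1 where at least `min_votes` masks agree.
    """
    stacked = np.stack([np.asarray(m) > 0 for m in masks], axis=0)
    votes = stacked.sum(axis=0)              # per-voxel agreement count
    return (votes >= min_votes).astype(np.uint8)

# Toy usage with three random masks of a small volume.
rng = np.random.default_rng(0)
demo = [rng.integers(0, 2, size=(4, 4, 4), dtype=np.uint8) for _ in range(3)]
fused = fuse_masks(demo, min_votes=2)
print(fused.shape, fused.dtype, int(fused.sum()))

Voting over a stacked array keeps the whole fusion vectorised and makes the agreement threshold an explicit parameter instead of being implied by pairwise set intersections.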
822 |
1406b2ab78b52823a8f455c8e2719f6bd84bd168
|
# -*- coding: utf-8 -*-
"""MicroPython rotary encoder library."""
from machine import Pin
ENC_STATES = (0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0)
class Encoder(object):
def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP,
scale=1, min=0, max=100, reverse=False):
self.pin_x = (pin_x if isinstance(pin_x, Pin) else
Pin(pin_x, mode=Pin.IN, pull=pin_mode))
self.pin_y = (pin_y if isinstance(pin_y, Pin) else
Pin(pin_y, mode=Pin.IN, pull=pin_mode))
self.pin_mode = pin_mode
self.scale = scale
self.min = min
self.max = max
self.reverse = 1 if reverse else -1
# The following variables are assigned to in the interrupt callback,
# so we have to allocate them here.
self._pos = -1
self._readings = 0
self._state = 0
self.set_callbacks(self._callback)
def _callback(self, line):
self._readings = (self._readings << 2 | self.pin_x.value() << 1 |
self.pin_y.value()) & 0x0f
self._state = ENC_STATES[self._readings] * self.reverse
if self._state:
self._pos = min(max(self.min, self._pos + self._state), self.max)
def set_callbacks(self, callback=None):
self.irq_x = self.pin_x.callback(
trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)
self.irq_y = self.pin_y.callback(
trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)
def position(self):
return self._pos * self.scale
def reset(self):
self._pos = 0
def setMax(self, Max):
self.max = Max
def setMin(self, Min):
self.min = Min
def setScale(self, Scale):
self.scale = Scale
|
[
"# -*- coding: utf-8 -*-\n\"\"\"MicroPython rotary encoder library.\"\"\"\n\nfrom machine import Pin\n\n\nENC_STATES = (0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0)\n\n\nclass Encoder(object):\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP,\n scale=1, min=0, max=100, reverse=False):\n self.pin_x = (pin_x if isinstance(pin_x, Pin) else\n Pin(pin_x, mode=Pin.IN, pull=pin_mode))\n self.pin_y = (pin_y if isinstance(pin_y, Pin) else\n Pin(pin_y, mode=Pin.IN, pull=pin_mode))\n\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n\n # The following variables are assigned to in the interrupt callback,\n # so we have to allocate them here.\n self._pos = -1\n self._readings = 0\n self._state = 0\n\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 0x0f\n\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(\n trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(\n trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\nfrom machine import Pin\nENC_STATES = 0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0\n\n\nclass Encoder(object):\n\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP, scale=\n 1, min=0, max=100, reverse=False):\n self.pin_x = pin_x if isinstance(pin_x, Pin) else Pin(pin_x, mode=\n Pin.IN, pull=pin_mode)\n self.pin_y = pin_y if isinstance(pin_y, Pin) else Pin(pin_y, mode=\n Pin.IN, pull=pin_mode)\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n self._pos = -1\n self._readings = 0\n self._state = 0\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\n<import token>\nENC_STATES = 0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0\n\n\nclass Encoder(object):\n\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP, scale=\n 1, min=0, max=100, reverse=False):\n self.pin_x = pin_x if isinstance(pin_x, Pin) else Pin(pin_x, mode=\n Pin.IN, pull=pin_mode)\n self.pin_y = pin_y if isinstance(pin_y, Pin) else Pin(pin_y, mode=\n Pin.IN, pull=pin_mode)\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n self._pos = -1\n self._readings = 0\n self._state = 0\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP, scale=\n 1, min=0, max=100, reverse=False):\n self.pin_x = pin_x if isinstance(pin_x, Pin) else Pin(pin_x, mode=\n Pin.IN, pull=pin_mode)\n self.pin_y = pin_y if isinstance(pin_y, Pin) else Pin(pin_y, mode=\n Pin.IN, pull=pin_mode)\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n self._pos = -1\n self._readings = 0\n self._state = 0\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n <function token>\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n <function token>\n <function token>\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n <function token>\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n <function token>\n <function token>\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n <function token>\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n <function token>\n <function token>\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n <function token>\n <function token>\n <function token>\n\n def reset(self):\n self._pos = 0\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass Encoder(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n"
] | false |
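ENC_STATES in the record above is a 16-entry lookup table indexed by the previous and current 2-bit (x, y) pin sample; each entry is the signed step for that quadrature transition. The following host-side sketch (plain CPython, no machine.Pin or interrupts) replays sample sequences through the same table; the sample sequences and the decode helper are illustrative, and the class's reverse flag and min/max clamping are omitted.

ENC_STATES = (0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0)

def decode(samples):
    """samples: iterable of (x, y) pin readings, one per edge interrupt."""
    readings = 0
    pos = 0
    for x, y in samples:
        readings = (readings << 2 | x << 1 | y) & 0x0F
        pos += ENC_STATES[readings]
    return pos

cycle = [(0, 1), (1, 1), (1, 0), (0, 0)]   # one full Gray-code cycle
print(decode(cycle))         # -4: four valid transitions in one direction
print(decode(cycle[::-1]))   # 3: opposite direction; the first sample matches the start state, so it is a no-op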
823 |
1bab6b039462bb5762aa588d5ba7c3e74362d0a7
|
class Solution:
def minRemoveToMakeValid(self, s: str) -> str:
bracketsToRemove = set()
stack = []
for i, c in enumerate(s):
if c not in '()':
continue
if c == '(':
stack.append(i)
elif not stack:
bracketsToRemove.add(i)
else:
stack.pop()
bracketsToRemove = bracketsToRemove.union(set(stack))
stringBuilder = []
for i,c in enumerate(s):
if i not in bracketsToRemove:
stringBuilder.append(c)
return "".join(stringBuilder)
Solution().minRemoveToMakeValid('L(ee)(t(()coe')
|
[
"class Solution:\n def minRemoveToMakeValid(self, s: str) -> str:\n bracketsToRemove = set()\n stack = []\n \n for i, c in enumerate(s):\n \n if c not in '()':\n continue\n if c == '(':\n stack.append(i)\n elif not stack:\n bracketsToRemove.add(i)\n else:\n stack.pop()\n \n bracketsToRemove = bracketsToRemove.union(set(stack))\n stringBuilder = []\n for i,c in enumerate(s):\n if i not in bracketsToRemove:\n stringBuilder.append(c)\n \n return \"\".join(stringBuilder)\n\n\nSolution().minRemoveToMakeValid('L(ee)(t(()coe')\n\n",
"class Solution:\n\n def minRemoveToMakeValid(self, s: str) ->str:\n bracketsToRemove = set()\n stack = []\n for i, c in enumerate(s):\n if c not in '()':\n continue\n if c == '(':\n stack.append(i)\n elif not stack:\n bracketsToRemove.add(i)\n else:\n stack.pop()\n bracketsToRemove = bracketsToRemove.union(set(stack))\n stringBuilder = []\n for i, c in enumerate(s):\n if i not in bracketsToRemove:\n stringBuilder.append(c)\n return ''.join(stringBuilder)\n\n\nSolution().minRemoveToMakeValid('L(ee)(t(()coe')\n",
"class Solution:\n\n def minRemoveToMakeValid(self, s: str) ->str:\n bracketsToRemove = set()\n stack = []\n for i, c in enumerate(s):\n if c not in '()':\n continue\n if c == '(':\n stack.append(i)\n elif not stack:\n bracketsToRemove.add(i)\n else:\n stack.pop()\n bracketsToRemove = bracketsToRemove.union(set(stack))\n stringBuilder = []\n for i, c in enumerate(s):\n if i not in bracketsToRemove:\n stringBuilder.append(c)\n return ''.join(stringBuilder)\n\n\n<code token>\n",
"class Solution:\n <function token>\n\n\n<code token>\n",
"<class token>\n<code token>\n"
] | false |
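A small sanity check for the record above, assuming its Solution class is in scope; is_balanced is a hypothetical helper added only for the test, and the expected string is the one this particular stack-based pass produces (other removals can also be valid answers to the underlying problem).

def is_balanced(s):
    depth = 0
    for c in s:
        if c == '(':
            depth += 1
        elif c == ')':
            depth -= 1
            if depth < 0:
                return False
    return depth == 0

result = Solution().minRemoveToMakeValid('L(ee)(t(()coe')
assert is_balanced(result)
assert result == 'L(ee)t()coe'   # the string this implementation produces
print(result)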
824 |
75ddcdd4e80b962198ff9de1d996837927c3ac1a
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, with_statement
"""
cosi299a- Cinderella
[email protected]
"""
def truecase_is(string):
""" -> lower/title/upper/other """
if string.islower():
return 'l'
if string.istitle():
return 't'
if string.isupper():
return 'u'
return 'o'
def alnum_is(string):
""" -> alpha/digit/other """ #assumption: only alnum strings analyzed
if string.isalpha():
return 'a'
if string.isdigit():
return 'd'
return 'o'
def truecase_matching_is(str1, str2):
""" -> f(ull-string)/s(ub-string)/n(one) """
if str1==str2:
return 'f'
if str1 in str2:
return 's'
return 'n'
def lowercase_matching_is(str1, str2):
return truecase_matching_is(str1.lower(),str2.lower())
|
[
"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\nfrom __future__ import print_function, with_statement\n\n\n\"\"\"\ncosi299a- Cinderella\[email protected]\n\"\"\"\n\ndef truecase_is(string):\n \"\"\" -> lower/title/upper/other \"\"\"\n if string.islower():\n return 'l'\n if string.istitle():\n return 't'\n if string.isupper():\n return 'u'\n return 'o'\n\ndef alnum_is(string):\n \"\"\" -> alpha/digit/other \"\"\" #assumption: only alnum strings analyzed\n if string.isalpha():\n return 'a'\n if string.isdigit():\n return 'd'\n return 'o'\n\ndef truecase_matching_is(str1, str2):\n \"\"\" -> f(ull-string)/s(ub-string)/n(one) \"\"\"\n if str1==str2:\n return 'f'\n if str1 in str2:\n return 's'\n return 'n'\n\ndef lowercase_matching_is(str1, str2):\n return truecase_matching_is(str1.lower(),str2.lower())\n",
"from __future__ import print_function, with_statement\n<docstring token>\n\n\ndef truecase_is(string):\n \"\"\" -> lower/title/upper/other \"\"\"\n if string.islower():\n return 'l'\n if string.istitle():\n return 't'\n if string.isupper():\n return 'u'\n return 'o'\n\n\ndef alnum_is(string):\n \"\"\" -> alpha/digit/other \"\"\"\n if string.isalpha():\n return 'a'\n if string.isdigit():\n return 'd'\n return 'o'\n\n\ndef truecase_matching_is(str1, str2):\n \"\"\" -> f(ull-string)/s(ub-string)/n(one) \"\"\"\n if str1 == str2:\n return 'f'\n if str1 in str2:\n return 's'\n return 'n'\n\n\ndef lowercase_matching_is(str1, str2):\n return truecase_matching_is(str1.lower(), str2.lower())\n",
"<import token>\n<docstring token>\n\n\ndef truecase_is(string):\n \"\"\" -> lower/title/upper/other \"\"\"\n if string.islower():\n return 'l'\n if string.istitle():\n return 't'\n if string.isupper():\n return 'u'\n return 'o'\n\n\ndef alnum_is(string):\n \"\"\" -> alpha/digit/other \"\"\"\n if string.isalpha():\n return 'a'\n if string.isdigit():\n return 'd'\n return 'o'\n\n\ndef truecase_matching_is(str1, str2):\n \"\"\" -> f(ull-string)/s(ub-string)/n(one) \"\"\"\n if str1 == str2:\n return 'f'\n if str1 in str2:\n return 's'\n return 'n'\n\n\ndef lowercase_matching_is(str1, str2):\n return truecase_matching_is(str1.lower(), str2.lower())\n",
"<import token>\n<docstring token>\n\n\ndef truecase_is(string):\n \"\"\" -> lower/title/upper/other \"\"\"\n if string.islower():\n return 'l'\n if string.istitle():\n return 't'\n if string.isupper():\n return 'u'\n return 'o'\n\n\ndef alnum_is(string):\n \"\"\" -> alpha/digit/other \"\"\"\n if string.isalpha():\n return 'a'\n if string.isdigit():\n return 'd'\n return 'o'\n\n\n<function token>\n\n\ndef lowercase_matching_is(str1, str2):\n return truecase_matching_is(str1.lower(), str2.lower())\n",
"<import token>\n<docstring token>\n\n\ndef truecase_is(string):\n \"\"\" -> lower/title/upper/other \"\"\"\n if string.islower():\n return 'l'\n if string.istitle():\n return 't'\n if string.isupper():\n return 'u'\n return 'o'\n\n\ndef alnum_is(string):\n \"\"\" -> alpha/digit/other \"\"\"\n if string.isalpha():\n return 'a'\n if string.isdigit():\n return 'd'\n return 'o'\n\n\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n\n\ndef truecase_is(string):\n \"\"\" -> lower/title/upper/other \"\"\"\n if string.islower():\n return 'l'\n if string.istitle():\n return 't'\n if string.isupper():\n return 'u'\n return 'o'\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
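A quick demo of how these classifiers label tokens, assuming truecase_is, alnum_is, truecase_matching_is and lowercase_matching_is from the record above are in scope (e.g. imported from that module); the sample tokens are made up for illustration.

assert truecase_is('hello') == 'l'
assert truecase_is('Hello') == 't'
assert truecase_is('HELLO') == 'u'
assert truecase_is('hELLo') == 'o'

assert alnum_is('word') == 'a'
assert alnum_is('2024') == 'd'
assert alnum_is('a1') == 'o'

assert truecase_matching_is('Cinder', 'Cinderella') == 's'
assert lowercase_matching_is('CINDERELLA', 'cinderella') == 'f'
print('all classification checks passed')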
825 |
cdcb2710291e9897b874f63840193470ed58be49
|
# -*- coding: utf-8 -*-
import json
import re
import scrapy
from scrapy import Request
class PageInfoAjaxSpider(scrapy.Spider):
name = 'page_info_ajax'
allowed_domains = ['bilibili.com']
# start_urls = ['http://bilibili.com/']
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36',
}
def start_requests(self):
url = 'https://s.search.bilibili.com/cate/search?callback=jqueryCallback_bili_8995260575257822&main_ver=v3&search_type=video&view_type=hot_rank&order=click©_right=-1&cate_id=130&page=1&pagesize=20&jsonp=jsonp&time_from=20190426&time_to=20190625&_=1561516363499'
yield Request(url, headers=self.headers)
def parse(self, response):
req_body = response.body
json_data = req_body.decode('utf-8')
pure_json_data = re.sub(r'jqueryCallback_bili_([0-9])*', '', json_data, count=1)
pure_json_data = json.loads(pure_json_data[1:-1])
print(pure_json_data['numPages'])
|
[
"# -*- coding: utf-8 -*-\nimport json\nimport re\n\nimport scrapy\nfrom scrapy import Request\n\n\nclass PageInfoAjaxSpider(scrapy.Spider):\n name = 'page_info_ajax'\n allowed_domains = ['bilibili.com']\n # start_urls = ['http://bilibili.com/']\n\n headers = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36',\n }\n\n def start_requests(self):\n url = 'https://s.search.bilibili.com/cate/search?callback=jqueryCallback_bili_8995260575257822&main_ver=v3&search_type=video&view_type=hot_rank&order=click©_right=-1&cate_id=130&page=1&pagesize=20&jsonp=jsonp&time_from=20190426&time_to=20190625&_=1561516363499'\n yield Request(url, headers=self.headers)\n\n def parse(self, response):\n req_body = response.body\n json_data = req_body.decode('utf-8')\n pure_json_data = re.sub(r'jqueryCallback_bili_([0-9])*', '', json_data, count=1)\n pure_json_data = json.loads(pure_json_data[1:-1])\n print(pure_json_data['numPages'])\n",
"import json\nimport re\nimport scrapy\nfrom scrapy import Request\n\n\nclass PageInfoAjaxSpider(scrapy.Spider):\n name = 'page_info_ajax'\n allowed_domains = ['bilibili.com']\n headers = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'\n }\n\n def start_requests(self):\n url = (\n 'https://s.search.bilibili.com/cate/search?callback=jqueryCallback_bili_8995260575257822&main_ver=v3&search_type=video&view_type=hot_rank&order=click©_right=-1&cate_id=130&page=1&pagesize=20&jsonp=jsonp&time_from=20190426&time_to=20190625&_=1561516363499'\n )\n yield Request(url, headers=self.headers)\n\n def parse(self, response):\n req_body = response.body\n json_data = req_body.decode('utf-8')\n pure_json_data = re.sub('jqueryCallback_bili_([0-9])*', '',\n json_data, count=1)\n pure_json_data = json.loads(pure_json_data[1:-1])\n print(pure_json_data['numPages'])\n",
"<import token>\n\n\nclass PageInfoAjaxSpider(scrapy.Spider):\n name = 'page_info_ajax'\n allowed_domains = ['bilibili.com']\n headers = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'\n }\n\n def start_requests(self):\n url = (\n 'https://s.search.bilibili.com/cate/search?callback=jqueryCallback_bili_8995260575257822&main_ver=v3&search_type=video&view_type=hot_rank&order=click©_right=-1&cate_id=130&page=1&pagesize=20&jsonp=jsonp&time_from=20190426&time_to=20190625&_=1561516363499'\n )\n yield Request(url, headers=self.headers)\n\n def parse(self, response):\n req_body = response.body\n json_data = req_body.decode('utf-8')\n pure_json_data = re.sub('jqueryCallback_bili_([0-9])*', '',\n json_data, count=1)\n pure_json_data = json.loads(pure_json_data[1:-1])\n print(pure_json_data['numPages'])\n",
"<import token>\n\n\nclass PageInfoAjaxSpider(scrapy.Spider):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def start_requests(self):\n url = (\n 'https://s.search.bilibili.com/cate/search?callback=jqueryCallback_bili_8995260575257822&main_ver=v3&search_type=video&view_type=hot_rank&order=click©_right=-1&cate_id=130&page=1&pagesize=20&jsonp=jsonp&time_from=20190426&time_to=20190625&_=1561516363499'\n )\n yield Request(url, headers=self.headers)\n\n def parse(self, response):\n req_body = response.body\n json_data = req_body.decode('utf-8')\n pure_json_data = re.sub('jqueryCallback_bili_([0-9])*', '',\n json_data, count=1)\n pure_json_data = json.loads(pure_json_data[1:-1])\n print(pure_json_data['numPages'])\n",
"<import token>\n\n\nclass PageInfoAjaxSpider(scrapy.Spider):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def start_requests(self):\n url = (\n 'https://s.search.bilibili.com/cate/search?callback=jqueryCallback_bili_8995260575257822&main_ver=v3&search_type=video&view_type=hot_rank&order=click©_right=-1&cate_id=130&page=1&pagesize=20&jsonp=jsonp&time_from=20190426&time_to=20190625&_=1561516363499'\n )\n yield Request(url, headers=self.headers)\n <function token>\n",
"<import token>\n\n\nclass PageInfoAjaxSpider(scrapy.Spider):\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
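The interesting step in parse() above is stripping the JSONP callback wrapper before json.loads. Here is an offline sketch of that step with a fabricated payload (the numPages value and the callback number are invented, not a real API response).

import json
import re

raw = 'jqueryCallback_bili_8995260575257822({"numPages": 42, "numResults": 833})'

stripped = re.sub(r'jqueryCallback_bili_([0-9])*', '', raw, count=1)
data = json.loads(stripped[1:-1])   # drop the surrounding parentheses
print(data['numPages'])             # -> 42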
826 |
f4bfef2ee78b87184cc72666fade949f8f931fc3
|
#### about enumerate
##s = input()
##for index, letter in enumerate(s):
##    print(index, ':', letter)
#### the same as
##for i in range(len(s)):
##    print(i, ':', s[i])
#### the index where each word starts
##st = input()
##for index, symbol in enumerate(st):
##    if symbol == ' ' and index != len(st)-1 or index == 0 or index == len(st):
##        print(index)
#### a name and a word are entered; print the name without its first and last letters
##sname = input()
##for index in range(len(sname)):
##    if sname[index] == ' ':
##        print(sname[(index+2):len(sname)-1])
#### about replace
#### a name and a string are entered; print the string without the first two letters of the name
##name = input("What's your name?")
##deal = input("How are you?")
##cutdeal = deal.replace(name[0], '')
##cutdeal = cutdeal.replace(name[1], '')
##print(cutdeal)
#### the same as
##name = input("What's your name?")
##deal = input("How are you?")
##for index, symbol in enumerate(deal):
##    if symbol == name[0] or symbol == name[1]:
##        cutdeal = deal[:index-1] + deal[index+1:]
##print(cutdeal)
#### about the while loop
##i = 1
##s = 0
##while s < 500:
##    s += i
##    i += 1
##print(i)
##
##s = input()
##while s:   ## while s is a non-empty string
##    print(s.lower())   ## all letters in lower case
#### print the square root of each number until the user enters an empty string; stop on a negative number
##n = input("Enter a number. ")
##while n:
##    n = int(n)
##    if n < 0:
##        break
##    print(n**(1/2))
##    n = input("Enter a number. ")
#### print the square root of each number until the user enters an empty string; ask again for a positive number when a negative one is entered
##n = input("Enter a number. ")
##while n:
##    n = int(n)
##    if n < 0:
##        n = input("Please enter a positive number instead. ")
##        continue
##    print(n**(1/2))
##    n = input("Enter a number. ")
#### the user enters numbers until an empty line; print the sum of only those greater than 100; stop asking if a number divisible by 500 is entered
##n = input("Enter a number. ")
##total = 0
##while n:
##    n = int(n)
##    if n % 500 == 0:
##        break
##    if n > 100:
##        total += n
##        n = input("Enter a number. ")
##        continue
##    n = input("Enter a number. ")
##print(total)
|
[
"#### про enumerate\n##s = input()\n##for index, letter in enumerate(s):\n## print(index,':',letter)\n#### то же что и\n##for i in range(len(s)):\n## print (i,':', s[i])\n\n#### номер начала каждого слова\n##st = input()\n##for index, symbol in enumerate(st):\n## if symbol == ' ' and index != len(st)-1 or index == 0 or index == len(st):\n## print(index)\n\n#### вводится имя и слово вывести имя без первой и последней букв\n##sname = input()\n##for index in range(len(sname)):\n## if sname[index] == ' ':\n## print(sname[(index+2):len(sname)-1])\n\n\n#### про replace\n####вводится имя и строка вывести строку без первых двух букв имени\n##name = input(\"What's your name?\")\n##deal = input(\"How are you?\")\n##cutdeal = deal.replace(name[0], '')\n##cutdeal = cutdeal.replace(name[1], '')\n##print(cutdeal)\n#### то же что и\n##name = input(\"What's your name?\")\n##deal = input(\"How are you?\")\n##for index, symbol in enumerate(deal):\n## if symbol == name[0] or symbol == name[1]:\n## cutdeal = deal[:index-1] + deal[index+1:]\n##print(cutdeal)\n\n####про цикл while\n##i = 1\n##s = 0\n##while s<500:\n## s+=i\n## i+=1\n##print(i)\n##\n##s = input()\n##while s: ## пока s - непустая строка\n## print(s.lower) ## все буквы в нижнем регистре\n\n####распечатыывать корень числа пока пользователь не введёт пустую строку, прекращается при отрицательном числе\n##n = input(\"Введите число. \")\n##while n:\n## n = int(n)\n## if n<0:\n## break\n## print (n**1/2)\n## n = input(\"Введите число. \")\n\n####распечатыывать корень числа пока пользователь не введёт пустую строку, просит положительное число при отрицательном числе\n##n = input(\"Введите число. \")\n##while n:\n## n = int(n)\n## if n<0:\n## n = input(\"Введите лучше положительное число. \")\n## continue\n## print (n**(1/2))\n## n = input(\"Введите число. \")\n\n####пользователь вводит числа до пустой строки выводится сумма только тех чисел, которые больше 100 если введено число кратное 500 то прекратить спрашивать числа\n##n = input(\"Введите число. \")\n##m = 0\n##while n:\n## n = int(n)\n## if n%500 == 0:\n## break\n## if n>100:\n## sum += n\n## n = input(\"Введите число. \")\n## continue\n## n = input(\"Введите число. \")\n##print(sum)\n",
""
] | false |
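The last exercise in the record above (sum only the inputs greater than 100, stop on an empty line or on a number divisible by 500) reads more cleanly with a single input call per iteration; a small uncommented sketch, with the prompt text as an assumption.

total = 0
while True:
    line = input("Enter a number (empty line to finish): ")
    if not line:
        break
    n = int(line)
    if n % 500 == 0:
        break
    if n > 100:
        total += n
print(total)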
827 |
6d0340a08701b0c4f34e9b833bca27cf455d682d
|
# coding: utf-8
# # Read Bathy data from ERDDAP
# In[ ]:
get_ipython().system(u'conda install basemap --yes')
# In[1]:
import numpy as np
import matplotlib.pyplot as plt
import urllib
import netCDF4
from mpl_toolkits.basemap import Basemap
# In[2]:
# Definine the domain of interest
minlat = 42
maxlat = 45
minlon = -67
maxlon = -61.5
isub = 5
# Read data from: http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.html
# using the netCDF output option
base_url='http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.nc?'
query='topo[(%f):%d:(%f)][(%f):%d:(%f)]' % (maxlat,isub,minlat,minlon,isub,maxlon)
url = base_url+query
print url
# In[3]:
# store data in NetCDF file
file='usgsCeSrtm30v6.nc'
urllib.urlretrieve (url, file)
# In[4]:
# open NetCDF data in
nc = netCDF4.Dataset(file)
ncv = nc.variables
print ncv.keys()
# In[5]:
lon = ncv['longitude'][:]
lat = ncv['latitude'][:]
lons, lats = np.meshgrid(lon,lat)
topo = ncv['topo'][:,:]
# In[ ]:
# Create map
m = Basemap(projection='mill', llcrnrlat=minlat,urcrnrlat=maxlat,llcrnrlon=minlon, urcrnrlon=maxlon,resolution='h')
fig1 = plt.figure(figsize=(10,8))
cs = m.pcolormesh(lons,lats,topo,cmap=plt.cm.jet,latlon=True)
m.drawcoastlines()
m.drawmapboundary()
plt.title('SMRT30 - Bathymetry/Topography')
cbar = plt.colorbar(orientation='horizontal', extend='both')
cbar.ax.set_xlabel('meters')
# Save figure (without 'white' borders)
plt.savefig('topo.png', bbox_inches='tight')
|
[
"\n# coding: utf-8\n\n# # Read Bathy data from ERDDAP\n\n# In[ ]:\n\nget_ipython().system(u'conda install basemap --yes')\n\n\n# In[1]:\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport urllib\nimport netCDF4\nfrom mpl_toolkits.basemap import Basemap\n\n\n# In[2]:\n\n# Definine the domain of interest\nminlat = 42\nmaxlat = 45\nminlon = -67\nmaxlon = -61.5\nisub = 5\n \n# Read data from: http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.html\n# using the netCDF output option\nbase_url='http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.nc?'\nquery='topo[(%f):%d:(%f)][(%f):%d:(%f)]' % (maxlat,isub,minlat,minlon,isub,maxlon)\nurl = base_url+query\nprint url\n\n\n# In[3]:\n\n# store data in NetCDF file\nfile='usgsCeSrtm30v6.nc'\nurllib.urlretrieve (url, file)\n\n\n# In[4]:\n\n# open NetCDF data in \nnc = netCDF4.Dataset(file)\nncv = nc.variables\nprint ncv.keys()\n\n\n# In[5]:\n\nlon = ncv['longitude'][:]\nlat = ncv['latitude'][:]\nlons, lats = np.meshgrid(lon,lat)\ntopo = ncv['topo'][:,:]\n\n\n# In[ ]:\n\n# Create map\nm = Basemap(projection='mill', llcrnrlat=minlat,urcrnrlat=maxlat,llcrnrlon=minlon, urcrnrlon=maxlon,resolution='h')\nfig1 = plt.figure(figsize=(10,8))\ncs = m.pcolormesh(lons,lats,topo,cmap=plt.cm.jet,latlon=True)\nm.drawcoastlines()\nm.drawmapboundary()\nplt.title('SMRT30 - Bathymetry/Topography')\ncbar = plt.colorbar(orientation='horizontal', extend='both')\ncbar.ax.set_xlabel('meters')\n \n# Save figure (without 'white' borders)\nplt.savefig('topo.png', bbox_inches='tight')\n\n"
] | true |
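The notebook above is Python 2 (print statements, urllib.urlretrieve). A Python 3 sketch of the same ERDDAP subset-URL construction that runs without any download; the domain values mirror the notebook, and the commented urllib.request call is the usual Python 3 replacement.

minlat, maxlat = 42, 45
minlon, maxlon = -67, -61.5
isub = 5

base_url = 'http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.nc?'
query = 'topo[(%f):%d:(%f)][(%f):%d:(%f)]' % (
    maxlat, isub, minlat, minlon, isub, maxlon)
url = base_url + query
print(url)

# To actually fetch the file in Python 3:
# import urllib.request
# urllib.request.urlretrieve(url, 'usgsCeSrtm30v6.nc')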
828 |
0f6737b9e9e9a13d75c20352e9ef9c1db6c0c8a3
|
#! /usr/bin/env python
# import ros stuff
import rospy
from std_srvs.srv import *
#to check if the service is active
active_ = False
def unable_service(req):
"""
This function contains the variable declared above that is
used to enable the service.
"""
global active_
active_ = req.data
res = SetBoolResponse()
res.success = True
res.message = 'Done!'
return res
def getInput():
"""
This function get the input, given by the user, on which of the 5
behaviors proposed, the robot must follow.
If one of the input chosen by the user is already active, the
function doesn't ask to give again the input.
"""
global active_
#to disable the service
active_ = False
# reading the previous input
prev_input_ = rospy.get_param('/input')
input_ = prev_input_
#in order to make the user to choose one of the 5 possible inputs
while (prev_input_ == input_) or (input_ > 5 or input_ < 1):
if input_ > 5 or input_ < 1:
#in the case in which the user make another selection
print "Unknown input, please try again"
#propose to the user which are the real possibilities
print("Please select one of the following senteces\n")
print("1 - Move the robot randomly in the environment, by choosing one of six possible target positions\n")
print("2 - The user can chose the next target position\n")
print("3 - Start following the external walls\n")
print("4 - Stop the robot in the last position\n")
print("5 - Change the planning algorithm from move_base to bug0 and vice versa\n")
#read the input typed by the user
input_ = (int(raw_input("Please select a number between 1 and 5: ")))
#set the choice made by the user
if input_ >= 1 and input_ <= 5:
rospy.set_param('/input', input_)
def main():
"""
The main function allows the user to choose the robot's behavior.
If the service is active it call the function getInput that allows
the user to make a new choice. If it is not, it check if the selected
behavior is the second one and in that case change it with the fourth one.
"""
global active_
#init user_interface
rospy.init_node('user_interface')
#service that allows the user to choose a new input
srv_user_interface = rospy.Service('/user_interface_service', SetBool, unable_service)
rate = rospy.Rate(1)
while not rospy.is_shutdown():
#if the service is not active
if not active_:
rate.sleep()
#if the selected behavior is the second one
if rospy.get_param("/input") == 2:
#change it in the fourth behavior
rospy.set_param("/input",4)
continue
#if the service is active
else:
getInput() # allow the user to choose a new behaviour
rate.sleep()
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
|
[
"#! /usr/bin/env python\n\n# import ros stuff\nimport rospy\nfrom std_srvs.srv import *\n\n#to check if the service is active\nactive_ = False\n\ndef unable_service(req):\n\t\"\"\"\n\tThis function contains the variable declared above that is\n\tused to enable the service.\n\t\"\"\"\n\tglobal active_\n \n\tactive_ = req.data\n\tres = SetBoolResponse()\n\tres.success = True\n\tres.message = 'Done!'\n\n\treturn res\n\t\ndef getInput():\n\t\"\"\"\n\tThis function get the input, given by the user, on which of the 5\n\tbehaviors proposed, the robot must follow.\n\tIf one of the input chosen by the user is already active, the \n\tfunction doesn't ask to give again the input.\n\t\"\"\"\t\n\tglobal active_\n\n\t#to disable the service \n\tactive_ = False \n\t\n\t# reading the previous input\n\tprev_input_ = rospy.get_param('/input')\n\tinput_ = prev_input_\n\t\n\t#in order to make the user to choose one of the 5 possible inputs\n\twhile (prev_input_ == input_) or (input_ > 5 or input_ < 1):\n\t\tif input_ > 5 or input_ < 1: \n\t\t\t#in the case in which the user make another selection\n\t\t\tprint \"Unknown input, please try again\" \n\t\t\n\t\t#propose to the user which are the real possibilities\n\t\tprint(\"Please select one of the following senteces\\n\")\n\t\tprint(\"1 - Move the robot randomly in the environment, by choosing one of six possible target positions\\n\")\n\t\tprint(\"2 - The user can chose the next target position\\n\")\n\t\tprint(\"3 - Start following the external walls\\n\")\n\t\tprint(\"4 - Stop the robot in the last position\\n\")\n\t\tprint(\"5 - Change the planning algorithm from move_base to bug0 and vice versa\\n\")\n\n\t\t#read the input typed by the user\t\n\t\tinput_ = (int(raw_input(\"Please select a number between 1 and 5: \")))\n\n\t#set the choice made by the user\n\tif input_ >= 1 and input_ <= 5:\n\t\trospy.set_param('/input', input_)\n\ndef main():\n\t\"\"\"\t\n\tThe main function allows the user to choose the robot's behavior.\n\tIf the service is active it call the function getInput that allows\n\tthe user to make a new choice. If it is not, it check if the selected\n\tbehavior is the second one and in that case change it with the fourth one.\n\t\"\"\"\n\tglobal active_\n\t\n\t#init user_interface\n\trospy.init_node('user_interface')\n\n\t#service that allows the user to choose a new input\n\tsrv_user_interface = rospy.Service('/user_interface_service', SetBool, unable_service)\n\t\n\trate = rospy.Rate(1)\n\twhile not rospy.is_shutdown():\n\t\t#if the service is not active\n\t\tif not active_: \n\t\t\trate.sleep()\n\t\t\t\n\t\t\t#if the selected behavior is the second one\n\t\t\tif rospy.get_param(\"/input\") == 2:\n\t\t\t\t#change it in the fourth behavior\n\t\t\t\trospy.set_param(\"/input\",4) \n\t\t\t\n\t\t\tcontinue\n\t\t\n\t\t#if the service is active\t\n\t\telse: \n\t\t\tgetInput() # allow the user to choose a new behaviour\n\t\t\n\t\trate.sleep()\n\t\t\nif __name__ == '__main__':\n try:\n main()\n except rospy.ROSInterruptException:\n pass\n"
] | true |
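The core of getInput() above is an input-validation loop wrapped around the ROS parameter server. A plain-Python sketch of that loop with the parameter server replaced by an ordinary argument, so it runs without rospy; MENU, ask_behaviour and the prompt text are illustrative names, not from the original node.

MENU = (
    "1 - Move the robot randomly between six target positions",
    "2 - Let the user choose the next target position",
    "3 - Start following the external walls",
    "4 - Stop the robot in the last position",
    "5 - Switch the planner between move_base and bug0",
)

def ask_behaviour(previous):
    """Keep asking until the user picks a valid choice different from `previous`."""
    choice = previous
    while choice == previous or not 1 <= choice <= 5:
        print("\n".join(MENU))
        try:
            choice = int(input("Please select a number between 1 and 5: "))
        except ValueError:
            choice = 0   # forces another round, mirroring the retry on bad input
    return choice

# current = ask_behaviour(previous=4)   # uncomment for an interactive test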
829 |
0686dec7f3dc23f01ffff41f611a1bb597bb5352
|
from .base import Base
class Files(Base):
endpoint = "/files"
def upload_file(self, channel_id, files):
return self.client.post(self.endpoint, data={"channel_id": channel_id}, files=files)
def get_file(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id,
)
def get_file_thumbnail(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/thumbnail",
)
def get_file_preview(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/preview",
)
def get_public_file_link(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/link",
)
def get_file_metadata(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/info",
)
|
[
"from .base import Base\n\n\nclass Files(Base):\n endpoint = \"/files\"\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={\"channel_id\": channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id,\n )\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/thumbnail\",\n )\n\n def get_file_preview(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/preview\",\n )\n\n def get_public_file_link(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/link\",\n )\n\n def get_file_metadata(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/info\",\n )\n",
"from .base import Base\n\n\nclass Files(Base):\n endpoint = '/files'\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n\n def get_file_preview(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/preview')\n\n def get_public_file_link(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/link')\n\n def get_file_metadata(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/info')\n",
"<import token>\n\n\nclass Files(Base):\n endpoint = '/files'\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n\n def get_file_preview(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/preview')\n\n def get_public_file_link(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/link')\n\n def get_file_metadata(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/info')\n",
"<import token>\n\n\nclass Files(Base):\n <assignment token>\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n\n def get_file_preview(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/preview')\n\n def get_public_file_link(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/link')\n\n def get_file_metadata(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/info')\n",
"<import token>\n\n\nclass Files(Base):\n <assignment token>\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n\n def get_file_preview(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/preview')\n\n def get_public_file_link(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/link')\n <function token>\n",
"<import token>\n\n\nclass Files(Base):\n <assignment token>\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n <function token>\n\n def get_public_file_link(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/link')\n <function token>\n",
"<import token>\n\n\nclass Files(Base):\n <assignment token>\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Files(Base):\n <assignment token>\n <function token>\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Files(Base):\n <assignment token>\n <function token>\n <function token>\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Files(Base):\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
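Record 829 above defines a thin Files wrapper whose methods delegate to self.client. A minimal sketch of a compatible HTTP driver and its wiring, assuming Base simply stores the driver as self.client; the Client class, base URL, and token handling below are hypothetical, not part of the source:

# Hypothetical driver exposing the get/post calls used by Files above.
import requests


class Client:
    def __init__(self, base_url, token):
        self.base_url = base_url
        self.headers = {"Authorization": "Bearer " + token}

    def get(self, endpoint, **kwargs):
        return requests.get(self.base_url + endpoint, headers=self.headers, **kwargs)

    def post(self, endpoint, data=None, files=None):
        return requests.post(self.base_url + endpoint, headers=self.headers,
                             data=data, files=files)


# Usage sketch (endpoint paths come from the Files methods above):
# api = Files(Client("https://chat.example.invalid/api/v4", token="..."))
# api.upload_file(channel_id="abc123", files={"files": open("report.pdf", "rb")})
# api.get_file_metadata("some-file-id")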
830 |
c3d9ad49b62c56dfbd9674cb1ac5c206e6401a27
|
# Copyright (c) 2017, Matt Layman
import bisect
import configparser
import os
import smartypants
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from handroll import logger
from handroll.exceptions import AbortError
from handroll.extensions.base import Extension
from handroll.i18n import _
class BlogPost(object):
def __init__(self, **kwargs):
self.date = kwargs['date']
self.source_file = kwargs['source_file']
self.summary = smartypants.smartypants(kwargs['summary'])
self.title = smartypants.smartypants(kwargs['title'])
self.route = kwargs['route']
self.url = kwargs['url']
# Having the posts enables a blog post to find its relationships.
self._posts = kwargs['posts']
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __lt__(self, other):
return self.date < other.date
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return 'BlogPost({}, {})'.format(self.source_file, self.date)
@property
def next(self):
"""Get the next chronological blog post."""
posts_by_date = self.posts_by_date
index = bisect.bisect_left(posts_by_date, self)
if index + 1 == len(posts_by_date):
return None
return posts_by_date[index + 1]
@property
def previous(self):
"""Get the previous chronological blog post."""
posts_by_date = self.posts_by_date
index = bisect.bisect_left(posts_by_date, self)
if index == 0:
return None
return posts_by_date[index - 1]
@property
def posts_by_date(self):
return sorted(self._posts.values(), key=lambda p: p.date)
class BlogExtension(Extension):
"""Track files marked as blog entries and generate a feed."""
handle_frontmatter_loaded = True
handle_pre_composition = True
handle_post_composition = True
required_metadata = {
'author': 'atom_author',
'id': 'atom_id',
'title': 'atom_title',
'url': 'atom_url',
}
def __init__(self, config):
super(BlogExtension, self).__init__(config)
self.posts = {}
self.atom_metadata = {}
self.atom_output = ''
self.list_template = None
self.list_output = None
self._resolver = None
self._should_generate = True
def on_pre_composition(self, director):
"""Check that all the required configuration exists."""
if not self._config.parser.has_section('blog'):
raise AbortError(
_('A blog section is missing in the configuration file.'))
# Collect atom feed configuration.
for metadata, option in self.required_metadata.items():
self._add_atom_metadata(metadata, option)
self.atom_output = self._get_option('atom_output')
# Collect HTML listing configuration.
if self._config.parser.has_option('blog', 'list_template'):
self.list_template = self._get_option('list_template')
self.list_output = self._get_option('list_output')
# Grab the resolver from the director for determining URLs for posts.
self._resolver = director.resolver
def on_frontmatter_loaded(self, source_file, frontmatter):
"""Record any new blog posts."""
if not self._is_post(frontmatter):
return
self._validate_post(source_file, frontmatter)
post = BlogPost(
date=frontmatter['date'],
source_file=source_file,
summary=frontmatter.get('summary', ''),
title=frontmatter['title'],
route=self._resolver.as_route(source_file),
url=self._resolver.as_url(source_file),
posts=self.posts,
)
frontmatter['post'] = post
if post != self.posts.get(source_file):
self.posts[source_file] = post
self._should_generate = True
def on_post_composition(self, director):
"""Generate blog output."""
if not self._should_generate:
return
blog_posts = sorted(
self.posts.values(), key=lambda p: p.date, reverse=True)
self._generate_atom_feed(director, blog_posts)
if self.list_template is not None:
self._generate_list_page(director, blog_posts)
self._should_generate = False
def _is_post(self, frontmatter):
"""Check if the front matter looks like a blog post."""
is_post = frontmatter.get('blog', False)
if type(is_post) != bool:
raise AbortError(
_('Invalid blog frontmatter (expects True or False): '
'{blog_value}').format(blog_value=is_post))
return is_post
def _validate_post(self, source_file, frontmatter):
"""Validate that the post contains all the required fields."""
required = set([
'date',
'title',
])
fields = set(frontmatter.keys())
missing = required - fields
if missing:
raise AbortError(_(
'The blog post, {filename}, '
'is missing required fields: {missing_fields}'.format(
filename=source_file, missing_fields=', '.join(missing))))
def _generate_atom_feed(self, director, blog_posts):
"""Generate the atom feed."""
logger.info(_('Generating Atom XML feed ...'))
builder = FeedBuilder(self.atom_metadata)
builder.add(blog_posts)
output_file = os.path.join(director.outdir, self.atom_output)
builder.write_to(output_file)
def _generate_list_page(self, director, blog_posts):
"""Generate the list page."""
logger.info(_('Generating blog list page ...'))
template = director.catalog.get_template(self.list_template)
builder = ListPageBuilder(template)
builder.add(blog_posts)
output_file = os.path.join(director.outdir, self.list_output)
builder.write_to(output_file)
def _add_atom_metadata(self, name, option):
"""Add atom metadata from the config parser."""
self.atom_metadata[name] = self._get_option(option)
def _get_option(self, option):
"""Get an option out of the blog section."""
try:
return self._config.parser.get('blog', option)
except configparser.NoOptionError:
raise AbortError(
_('The blog extension requires the {option} option.').format(
option=option))
class BlogBuilder(object):
"""A template pattern class for generating output related to a blog."""
def _generate_output(self):
"""Generate output that belongs in the destination file.
Subclasses must implement this method.
"""
raise NotImplementedError()
def write_to(self, filepath):
"""Write the output to the provided filepath."""
output = self._generate_output()
with open(filepath, 'wb') as out:
out.write(output.encode('utf-8'))
out.write(b'<!-- handrolled for excellence -->\n')
class FeedBuilder(BlogBuilder):
"""Transform blog metadata and posts into an Atom feed."""
def __init__(self, metadata):
self.metadata = metadata
self._feed = AtomFeed(**metadata)
def add(self, posts):
"""Add blog posts to the feed."""
for post in posts:
self._feed.add(FeedEntry(
summary=post.summary,
title=post.title,
title_type='html',
url=post.url,
updated=post.date,
))
def _generate_output(self):
return self._feed.to_string()
class ListPageBuilder(BlogBuilder):
"""Transform blog posts into a list page."""
def __init__(self, template):
self._template = template
self._blog_list = ''
self._posts = None
def add(self, posts):
"""Add the posts and generate a blog list."""
li_html = []
for post in posts:
li_html.append(
u'<li><a href="{route}">{title}</a></li>'.format(
route=post.route, title=post.title))
self._blog_list = u'\n'.join(li_html)
self._posts = posts
def _generate_output(self):
context = {
'blog_list': self._blog_list,
'posts': self._posts,
}
return self._template.render(context)
|
[
"# Copyright (c) 2017, Matt Layman\n\nimport bisect\nimport configparser\nimport os\n\nimport smartypants\nfrom werkzeug.contrib.atom import AtomFeed, FeedEntry\n\nfrom handroll import logger\nfrom handroll.exceptions import AbortError\nfrom handroll.extensions.base import Extension\nfrom handroll.i18n import _\n\n\nclass BlogPost(object):\n\n def __init__(self, **kwargs):\n self.date = kwargs['date']\n self.source_file = kwargs['source_file']\n self.summary = smartypants.smartypants(kwargs['summary'])\n self.title = smartypants.smartypants(kwargs['title'])\n self.route = kwargs['route']\n self.url = kwargs['url']\n # Having the posts enables a blog post to find its relationships.\n self._posts = kwargs['posts']\n\n def __eq__(self, other):\n if other is None:\n return False\n return self.__dict__ == other.__dict__\n\n def __lt__(self, other):\n return self.date < other.date\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n\n @property\n def next(self):\n \"\"\"Get the next chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index + 1 == len(posts_by_date):\n return None\n return posts_by_date[index + 1]\n\n @property\n def previous(self):\n \"\"\"Get the previous chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index == 0:\n return None\n return posts_by_date[index - 1]\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n\n required_metadata = {\n 'author': 'atom_author',\n 'id': 'atom_id',\n 'title': 'atom_title',\n 'url': 'atom_url',\n }\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(\n _('A blog section is missing in the configuration file.'))\n\n # Collect atom feed configuration.\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n\n # Collect HTML listing configuration.\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n\n # Grab the resolver from the director for determining URLs for posts.\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(\n date=frontmatter['date'],\n source_file=source_file,\n summary=frontmatter.get('summary', ''),\n title=frontmatter['title'],\n route=self._resolver.as_route(source_file),\n url=self._resolver.as_url(source_file),\n posts=self.posts,\n )\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n 
self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(\n self.posts.values(), key=lambda p: p.date, reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(\n _('Invalid blog frontmatter (expects True or False): '\n '{blog_value}').format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set([\n 'date',\n 'title',\n ])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, '\n 'is missing required fields: {missing_fields}'.format(\n filename=source_file, missing_fields=', '.join(missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(\n _('The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(\n summary=post.summary,\n title=post.title,\n title_type='html',\n url=post.url,\n updated=post.date,\n ))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog 
list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(\n u'<li><a href=\"{route}\">{title}</a></li>'.format(\n route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {\n 'blog_list': self._blog_list,\n 'posts': self._posts,\n }\n return self._template.render(context)\n",
"import bisect\nimport configparser\nimport os\nimport smartypants\nfrom werkzeug.contrib.atom import AtomFeed, FeedEntry\nfrom handroll import logger\nfrom handroll.exceptions import AbortError\nfrom handroll.extensions.base import Extension\nfrom handroll.i18n import _\n\n\nclass BlogPost(object):\n\n def __init__(self, **kwargs):\n self.date = kwargs['date']\n self.source_file = kwargs['source_file']\n self.summary = smartypants.smartypants(kwargs['summary'])\n self.title = smartypants.smartypants(kwargs['title'])\n self.route = kwargs['route']\n self.url = kwargs['url']\n self._posts = kwargs['posts']\n\n def __eq__(self, other):\n if other is None:\n return False\n return self.__dict__ == other.__dict__\n\n def __lt__(self, other):\n return self.date < other.date\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n\n @property\n def next(self):\n \"\"\"Get the next chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index + 1 == len(posts_by_date):\n return None\n return posts_by_date[index + 1]\n\n @property\n def previous(self):\n \"\"\"Get the previous chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index == 0:\n return None\n return posts_by_date[index - 1]\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if 
self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': 
self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n\n def __init__(self, **kwargs):\n self.date = kwargs['date']\n self.source_file = kwargs['source_file']\n self.summary = smartypants.smartypants(kwargs['summary'])\n self.title = smartypants.smartypants(kwargs['title'])\n self.route = kwargs['route']\n self.url = kwargs['url']\n self._posts = kwargs['posts']\n\n def __eq__(self, other):\n if other is None:\n return False\n return self.__dict__ == other.__dict__\n\n def __lt__(self, other):\n return self.date < other.date\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n\n @property\n def next(self):\n \"\"\"Get the next chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index + 1 == len(posts_by_date):\n return None\n return posts_by_date[index + 1]\n\n @property\n def previous(self):\n \"\"\"Get the previous chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index == 0:\n return None\n return posts_by_date[index - 1]\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if 
type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n\n def __init__(self, **kwargs):\n self.date = kwargs['date']\n self.source_file = kwargs['source_file']\n self.summary = smartypants.smartypants(kwargs['summary'])\n self.title = smartypants.smartypants(kwargs['title'])\n self.route = kwargs['route']\n self.url = kwargs['url']\n self._posts = kwargs['posts']\n <function token>\n\n def __lt__(self, other):\n return self.date < other.date\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n\n @property\n def next(self):\n \"\"\"Get the next chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index + 1 == len(posts_by_date):\n return None\n return posts_by_date[index + 1]\n\n @property\n def previous(self):\n \"\"\"Get the previous chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index == 0:\n return None\n return posts_by_date[index - 1]\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or 
False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n <function token>\n <function token>\n\n def __lt__(self, other):\n return self.date < other.date\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n\n @property\n def next(self):\n \"\"\"Get the next chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index + 1 == len(posts_by_date):\n return None\n return posts_by_date[index + 1]\n\n @property\n def previous(self):\n \"\"\"Get the previous chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index == 0:\n return None\n return posts_by_date[index - 1]\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - 
fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n <function token>\n <function token>\n\n def __lt__(self, other):\n return self.date < other.date\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n <function token>\n\n @property\n def previous(self):\n \"\"\"Get the previous chronological blog post.\"\"\"\n posts_by_date = self.posts_by_date\n index = bisect.bisect_left(posts_by_date, self)\n if index == 0:\n return None\n return posts_by_date[index - 1]\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, 
blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n <function token>\n <function token>\n\n def __lt__(self, other):\n return self.date < other.date\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n <function token>\n <function token>\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, 
self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n <function token>\n <function token>\n\n def __lt__(self, other):\n return self.date < other.date\n <function token>\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n <function token>\n <function token>\n\n @property\n def posts_by_date(self):\n return sorted(self._posts.values(), key=lambda p: p.date)\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def 
_generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n <function token>\n <function token>\n\n def __lt__(self, other):\n return self.date < other.date\n <function token>\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n <function token>\n <function token>\n <function token>\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list 
page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
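A small sketch of the required-field check performed by _validate_post() in the extension above. The frontmatter dicts and the plain ValueError are illustrative stand-ins; the extension raises its own AbortError with a translated message.

def validate_post(source_file, frontmatter, required=('date', 'title')):
    # Mirror the set difference used by _validate_post().
    missing = set(required) - set(frontmatter.keys())
    if missing:
        raise ValueError(
            'The blog post, {0}, is missing required fields: {1}'.format(
                source_file, ', '.join(sorted(missing))))


validate_post('ok.md', {'date': '2016-01-01', 'title': 'Hello', 'blog': True})
try:
    validate_post('broken.md', {'blog': True})
except ValueError as error:
    print(error)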
"<import token>\n\n\nclass BlogPost(object):\n <function token>\n <function token>\n <function token>\n <function token>\n\n def __repr__(self):\n return 'BlogPost({}, {})'.format(self.source_file, self.date)\n <function token>\n <function token>\n <function token>\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list 
page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n\n\nclass BlogPost(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = 
director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
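A short sketch of how on_post_composition() orders posts before handing them to the feed and list builders: newest first, sorted by date with reverse=True. The Post namedtuple and sample dates are illustrative.

from collections import namedtuple
from datetime import datetime

Post = namedtuple('Post', ['title', 'date'])

posts = {
    'a.md': Post('Older', datetime(2015, 1, 1)),
    'b.md': Post('Newer', datetime(2016, 6, 1)),
}

# Same ordering as the extension: most recent post comes first.
blog_posts = sorted(posts.values(), key=lambda p: p.date, reverse=True)
print([p.title for p in blog_posts])  # ['Newer', 'Older']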
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n \"\"\"Track files marked as blog entries and generate a feed.\"\"\"\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n 
builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n handle_frontmatter_loaded = True\n handle_pre_composition = True\n handle_post_composition = True\n required_metadata = {'author': 'atom_author', 'id': 'atom_id', 'title':\n 'atom_title', 'url': 'atom_url'}\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n 
\"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n\n def _is_post(self, frontmatter):\n \"\"\"Check if the front matter looks like a blog post.\"\"\"\n is_post = frontmatter.get('blog', False)\n if type(is_post) != bool:\n raise AbortError(_(\n 'Invalid blog frontmatter (expects True or False): {blog_value}'\n ).format(blog_value=is_post))\n return is_post\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def 
_get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
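A sketch, using only the standard library, of the [blog] section handling behind _get_option() and _add_atom_metadata(). The in-memory config text and the RuntimeError are assumptions; the extension reads from its own config parser and raises AbortError when an option is missing.

import configparser

config_text = u"""
[blog]
atom_author = Jane Doe
atom_id = https://example.com/feed.xml
atom_title = Example Blog
atom_url = https://example.com/
atom_output = feed.xml
"""

parser = configparser.ConfigParser()
parser.read_string(config_text)


def get_option(option):
    # Mirrors _get_option(): missing options abort with a clear message.
    try:
        return parser.get('blog', option)
    except configparser.NoOptionError:
        raise RuntimeError(
            'The blog extension requires the {0} option.'.format(option))


required_metadata = {'author': 'atom_author', 'id': 'atom_id',
                     'title': 'atom_title', 'url': 'atom_url'}
# Mirrors the _add_atom_metadata() loop in on_pre_composition().
atom_metadata = {name: get_option(option)
                 for name, option in required_metadata.items()}
print(atom_metadata['title'])
print(get_option('atom_output'))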
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n\n def _validate_post(self, source_file, frontmatter):\n \"\"\"Validate that the post contains all the required fields.\"\"\"\n required = set(['date', 'title'])\n fields = set(frontmatter.keys())\n missing = required - fields\n if missing:\n raise AbortError(_(\n 'The blog post, {filename}, is missing required fields: {missing_fields}'\n .format(filename=source_file, missing_fields=', '.join(\n missing))))\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass 
BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
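A sketch of the _is_post() guard in the extension: the 'blog' frontmatter flag must be a genuine boolean, and any other value is rejected. The ValueError is an illustrative stand-in for the extension's AbortError.

def is_post(frontmatter):
    # Anything other than True/False is treated as invalid frontmatter.
    flag = frontmatter.get('blog', False)
    if not isinstance(flag, bool):
        raise ValueError(
            'Invalid blog frontmatter (expects True or False): {0}'.format(flag))
    return flag


print(is_post({'blog': True, 'title': 'Hi'}))   # True
print(is_post({'title': 'Not a post'}))         # False (default)
try:
    is_post({'blog': 'yes'})
except ValueError as error:
    print(error)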
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, config):\n super(BlogExtension, self).__init__(config)\n self.posts = {}\n self.atom_metadata = {}\n self.atom_output = ''\n self.list_template = None\n self.list_output = None\n self._resolver = None\n self._should_generate = True\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n <function token>\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n 
with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n <function token>\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n\n def _generate_list_page(self, director, blog_posts):\n \"\"\"Generate the list page.\"\"\"\n logger.info(_('Generating blog list page ...'))\n template = director.catalog.get_template(self.list_template)\n builder = ListPageBuilder(template)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.list_output)\n builder.write_to(output_file)\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def 
__init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
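A sketch of ListPageBuilder.add() and _generate_output(): posts become <li> links joined into blog_list, which is then rendered through a template object exposing render(context). StubTemplate stands in for whatever director.catalog.get_template() returns and is purely illustrative.

class StubTemplate(object):
    """Illustrative template; real templates come from the director's catalog."""

    def render(self, context):
        return u'<html><body><ul>\n{0}\n</ul></body></html>'.format(
            context['blog_list'])


def build_blog_list(posts):
    # Same shape as ListPageBuilder.add(): one <li> anchor per post.
    li_html = [u'<li><a href="{0}">{1}</a></li>'.format(route, title)
               for route, title in posts]
    return u'\n'.join(li_html)


posts = [('/a.html', 'Post A'), ('/b.html', 'Post B')]
context = {'blog_list': build_blog_list(posts), 'posts': posts}
print(StubTemplate().render(context))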
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n\n def on_frontmatter_loaded(self, source_file, frontmatter):\n \"\"\"Record any new blog posts.\"\"\"\n if not self._is_post(frontmatter):\n return\n self._validate_post(source_file, frontmatter)\n post = BlogPost(date=frontmatter['date'], source_file=source_file,\n summary=frontmatter.get('summary', ''), title=frontmatter[\n 'title'], route=self._resolver.as_route(source_file), url=self.\n _resolver.as_url(source_file), posts=self.posts)\n frontmatter['post'] = post\n if post != self.posts.get(source_file):\n self.posts[source_file] = post\n self._should_generate = True\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n <function token>\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n <function token>\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return 
self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n <function token>\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n <function token>\n\n def _generate_atom_feed(self, director, blog_posts):\n \"\"\"Generate the atom feed.\"\"\"\n logger.info(_('Generating Atom XML feed ...'))\n builder = FeedBuilder(self.atom_metadata)\n builder.add(blog_posts)\n output_file = os.path.join(director.outdir, self.atom_output)\n builder.write_to(output_file)\n <function token>\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': 
self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def on_pre_composition(self, director):\n \"\"\"Check that all the required configuration exists.\"\"\"\n if not self._config.parser.has_section('blog'):\n raise AbortError(_(\n 'A blog section is missing in the configuration file.'))\n for metadata, option in self.required_metadata.items():\n self._add_atom_metadata(metadata, option)\n self.atom_output = self._get_option('atom_output')\n if self._config.parser.has_option('blog', 'list_template'):\n self.list_template = self._get_option('list_template')\n self.list_output = self._get_option('list_output')\n self._resolver = director.resolver\n <function token>\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n <function token>\n <function token>\n <function token>\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n <function token>\n <function token>\n <function token>\n\n def _add_atom_metadata(self, name, option):\n \"\"\"Add atom metadata from the config parser.\"\"\"\n self.atom_metadata[name] = self._get_option(option)\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_post_composition(self, director):\n \"\"\"Generate blog output.\"\"\"\n if not self._should_generate:\n return\n blog_posts = sorted(self.posts.values(), key=lambda p: p.date,\n reverse=True)\n self._generate_atom_feed(director, blog_posts)\n if self.list_template is not None:\n self._generate_list_page(director, blog_posts)\n self._should_generate = False\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def _get_option(self, option):\n \"\"\"Get an option out of the blog section.\"\"\"\n try:\n return self._config.parser.get('blog', option)\n except configparser.NoOptionError:\n raise AbortError(_(\n 'The blog extension requires the {option} option.').format(\n option=option))\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n\n\nclass BlogExtension(Extension):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass BlogBuilder(object):\n \"\"\"A template pattern class for generating output related to a blog.\"\"\"\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass BlogBuilder(object):\n <docstring token>\n\n def _generate_output(self):\n \"\"\"Generate output that belongs in the destination file.\n\n Subclasses must implement this method.\n \"\"\"\n raise NotImplementedError()\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass BlogBuilder(object):\n <docstring token>\n <function token>\n\n def write_to(self, filepath):\n \"\"\"Write the output to the provided filepath.\"\"\"\n output = self._generate_output()\n with open(filepath, 'wb') as out:\n out.write(output.encode('utf-8'))\n out.write(b'<!-- handrolled for excellence -->\\n')\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass BlogBuilder(object):\n <docstring token>\n <function token>\n <function token>\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass FeedBuilder(BlogBuilder):\n \"\"\"Transform blog metadata and posts into an Atom feed.\"\"\"\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass FeedBuilder(BlogBuilder):\n <docstring token>\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n\n def _generate_output(self):\n return self._feed.to_string()\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass FeedBuilder(BlogBuilder):\n <docstring token>\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n\n def add(self, posts):\n \"\"\"Add blog posts to the feed.\"\"\"\n for post in posts:\n self._feed.add(FeedEntry(summary=post.summary, title=post.title,\n title_type='html', url=post.url, updated=post.date))\n <function token>\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass FeedBuilder(BlogBuilder):\n <docstring token>\n\n def __init__(self, metadata):\n self.metadata = metadata\n self._feed = AtomFeed(**metadata)\n <function token>\n <function token>\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass FeedBuilder(BlogBuilder):\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ListPageBuilder(BlogBuilder):\n \"\"\"Transform blog posts into a list page.\"\"\"\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ListPageBuilder(BlogBuilder):\n <docstring token>\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n\n def _generate_output(self):\n context = {'blog_list': self._blog_list, 'posts': self._posts}\n return self._template.render(context)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ListPageBuilder(BlogBuilder):\n <docstring token>\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n\n def add(self, posts):\n \"\"\"Add the posts and generate a blog list.\"\"\"\n li_html = []\n for post in posts:\n li_html.append(u'<li><a href=\"{route}\">{title}</a></li>'.format\n (route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ListPageBuilder(BlogBuilder):\n <docstring token>\n\n def __init__(self, template):\n self._template = template\n self._blog_list = ''\n self._posts = None\n <function token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ListPageBuilder(BlogBuilder):\n <docstring token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
831 |
7fa7a632078ce4f0052e3cadf11d5efd47a1fad5
|
import bpy


class TILA_Config_LogElement(bpy.types.PropertyGroup):
	name: bpy.props.StringProperty(default='')
	icon: bpy.props.StringProperty(default='BLANK1')

class TILA_Config_LogList(bpy.types.UIList):
	bl_idname = "TILA_UL_Config_log_list"

	def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
		row = layout.row(align=True)
		row.label(text=item.name, icon=item.icon)

class TILA_Config_SatusList(bpy.types.UIList):
	bl_idname = "TILA_UL_Config_status_list"

	def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
		row = layout.row(align=True)
		row.label(text=item.name, icon=item.icon)

class TILA_Config_Log():
	def __init__(self, log, index_name):
		self.log = log
		self.index_name = index_name

	def append(self, name, icon='BLANK1'):
		element = self.log.add()
		element.name = name
		element.icon = icon
		setattr(bpy.context.window_manager, self.index_name, len(self.log)-1)

	def info(self, name):
		self.append(name, icon='INFO')

	def warning(self, name):
		self.append(name, icon='ERROR')

	def error(self, name):
		self.append(name, icon='CANCEL')

	def start(self, name):
		self.append(name, icon='TRIA_RIGHT')

	def done(self, name):
		self.append(name, icon='CHECKMARK')
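
# --- Hedged usage sketch (added for illustration; not part of the original source) ---
# A minimal example of how the classes above could be wired together: register the
# PropertyGroup/UIList types, attach a collection property and an index property to the
# WindowManager, then drive TILA_Config_Log against them. The property names
# 'tila_config_log' and 'tila_config_log_idx' are assumptions chosen for this sketch,
# not names taken from the add-on.
def example_register_and_log():
	for cls in (TILA_Config_LogElement, TILA_Config_LogList, TILA_Config_SatusList):
		bpy.utils.register_class(cls)
	# Assumed property names; the real add-on may store the log elsewhere.
	bpy.types.WindowManager.tila_config_log = bpy.props.CollectionProperty(type=TILA_Config_LogElement)
	bpy.types.WindowManager.tila_config_log_idx = bpy.props.IntProperty(default=0)
	log = TILA_Config_Log(bpy.context.window_manager.tila_config_log, 'tila_config_log_idx')
	log.start('Installing add-on configuration')
	log.info('Copying keymaps')
	log.done('Configuration installed')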
|
[
"import bpy\n\n\nclass TILA_Config_LogElement(bpy.types.PropertyGroup):\n\tname: bpy.props.StringProperty(default='')\n\ticon: bpy.props.StringProperty(default='BLANK1')\n\nclass TILA_Config_LogList(bpy.types.UIList):\n\tbl_idname = \"TILA_UL_Config_log_list\"\n\t\n\tdef draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):\n\t\trow = layout.row(align=True)\n\t\trow.label(text=item.name, icon=item.icon)\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n\tbl_idname = \"TILA_UL_Config_status_list\"\n\t\n\tdef draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):\n\t\trow = layout.row(align=True)\n\t\trow.label(text=item.name, icon=item.icon)\n\nclass TILA_Config_Log():\n\tdef __init__(self, log, index_name):\n\t\tself.log = log\n\t\tself.index_name = index_name\n\n\tdef append(self, name, icon='BLANK1'):\n\t\telement = self.log.add()\n\t\telement.name = name\n\t\telement.icon = icon\n\t\tsetattr(bpy.context.window_manager, self.index_name, len(self.log)-1)\n\t\n\tdef info(self, name):\n\t\tself.append(name, icon='INFO')\n\n\tdef warning(self, name):\n\t\tself.append(name, icon='ERROR')\n\n\tdef error(self, name):\n\t\tself.append(name, icon='CANCEL')\n\n\tdef start(self, name):\n\t\tself.append(name, icon='TRIA_RIGHT')\n\t\n\tdef done(self, name):\n\t\tself.append(name, icon='CHECKMARK')\n",
"import bpy\n\n\nclass TILA_Config_LogElement(bpy.types.PropertyGroup):\n name: bpy.props.StringProperty(default='')\n icon: bpy.props.StringProperty(default='BLANK1')\n\n\nclass TILA_Config_LogList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_log_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_status_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n\n\nclass TILA_Config_LogElement(bpy.types.PropertyGroup):\n name: bpy.props.StringProperty(default='')\n icon: bpy.props.StringProperty(default='BLANK1')\n\n\nclass TILA_Config_LogList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_log_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_status_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n\n\nclass TILA_Config_LogList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_log_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_status_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n\n\nclass TILA_Config_LogList(bpy.types.UIList):\n <assignment token>\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_status_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n\n\nclass TILA_Config_LogList(bpy.types.UIList):\n <assignment token>\n <function token>\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_status_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n bl_idname = 'TILA_UL_Config_status_list'\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n <assignment token>\n\n def draw_item(self, context, layout, data, item, icon, active_data,\n active_propname, index):\n row = layout.row(align=True)\n row.label(text=item.name, icon=item.icon)\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TILA_Config_SatusList(bpy.types.UIList):\n <assignment token>\n <function token>\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n\n def error(self, name):\n self.append(name, icon='CANCEL')\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n\n def __init__(self, log, index_name):\n self.log = log\n self.index_name = index_name\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n <function token>\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n <function token>\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n <function token>\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n\n def done(self, name):\n self.append(name, icon='CHECKMARK')\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n <function token>\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n\n def info(self, name):\n self.append(name, icon='INFO')\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n <function token>\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n <function token>\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n <function token>\n\n def warning(self, name):\n self.append(name, icon='ERROR')\n <function token>\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n <function token>\n\n def append(self, name, icon='BLANK1'):\n element = self.log.add()\n element.name = name\n element.icon = icon\n setattr(bpy.context.window_manager, self.index_name, len(self.log) - 1)\n <function token>\n <function token>\n <function token>\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def start(self, name):\n self.append(name, icon='TRIA_RIGHT')\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TILA_Config_Log:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
832 |
77e4bbe625251254cdadaeeb23dddf51e729e747
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from django import forms
from programs.models import *
from programs.forms import CustomUserCreationForm, CustomUserChangeForm
import pdb
class ProgramAdmin(admin.ModelAdmin):

	list_display = ('description', 'get_university')
	search_fields = ('description', 'department__university__code')
	list_filter = ('department__university',)

	def get_university(self, obj):
		return obj.department.university

	def save_model(self, request, obj, form, change):
		obj.code = obj.description.replace(' ', '_')
		obj.save()

	get_university.short_description = 'University'

	def change_view(self, request, object_id, extra_content=None):
		self.exclude = ('',)
		return super(ProgramAdmin, self).change_view(request, object_id)

	def add_view(self, request, extra_content=None):
		self.exclude = ('code',)
		return super(ProgramAdmin, self).add_view(request)


class ProgramInline(admin.TabularInline):
	model = Program
	extra = 0
	fields = ('description',)


class DepartmentAdmin(admin.ModelAdmin):

	fieldsets = [
		(None, {'fields': ['description', 'university', 'tenured', 'nonTenured']}),
	]
	inlines = [ProgramInline]

	search_fields = ('university__description', 'description')
	list_filter = ('description', 'university')

	def save_model(self, request, obj, form, change):
		if obj.code == '':
			obj.code = obj.name.replace(' ', '_')
		obj.save()


class DepartmentInline(admin.TabularInline):
	model = Department
	extra = 0
	fields = ('description',)


class UniversityAdmin(admin.ModelAdmin):

	inlines = [DepartmentInline]

	search_fields = ('description',)

	def save_model(self, request, obj, form, change):
		obj.code = obj.description.replace(' ', '_')
		obj.save()

	def change_view(self, request, object_id, extra_content=None):
		self.exclude = ('',)
		return super(UniversityAdmin, self).change_view(request, object_id)

	def add_view(self, request, extra_content=None):
		self.exclude = ('code',)
		return super(UniversityAdmin, self).add_view(request)

class CourseForm(forms.ModelForm):

	class Meta:
		# ModelForm requires the lowercase 'model' option; the original capitalised
		# 'Model' is ignored by Django, leaving the form unbound. 'fields' is added so
		# newer Django versions accept a ModelForm without an explicit field list.
		model = Course
		fields = '__all__'

	def __init__(self, *args, **kwargs):
		super(CourseForm, self).__init__(*args, **kwargs)
		self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact=self.instance.id)

	def clean(self):
		# Need to handle validation for unique_together
		cleaned_data = self.cleaned_data
		if self.instance.pk is None:
			if Course.objects.filter(code=cleaned_data['code'], university=cleaned_data['university']).exists():
				raise forms.ValidationError('The course already exists at this university.')
		return cleaned_data
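
# --- Hedged helper sketch (added for illustration; not part of the original source) ---
# The clean() override above only guards newly created courses; editing an existing
# course into a colliding (code, university) pair would slip through. A small helper
# like the one below could cover both cases by excluding the row being edited. It is an
# illustration of the idea, not code from the project.
def course_already_exists(code, university, exclude_pk=None):
	queryset = Course.objects.filter(code=code, university=university)
	if exclude_pk is not None:
		queryset = queryset.exclude(pk=exclude_pk)
	return queryset.exists()
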
class CourseAdmin(admin.ModelAdmin):
	form = CourseForm

	list_display = ('code', 'university',)
	list_filter = ('university',)
	search_fields = ('code',)

	def save_model(self, request, obj, form, change):
		if obj.code == '':
			obj.code = obj.name.replace(' ', '_')
		obj.save()


class dbAdmin(UserAdmin):
	fieldsets = (
		(None, {'fields': ('email', 'password')}),
		(_('Personal info'), {'fields': ('first_name', 'last_name')}),
		(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
			'groups', 'user_permissions')}),
		(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
	)

	add_fieldsets = (
		(None, {
			'classes': ('wide',),
			'fields': ('email', 'password1', 'password2')},
		),
	)
	form = CustomUserChangeForm
	add_form = CustomUserCreationForm
	list_display = ('email', 'first_name', 'last_name', 'is_staff')
	search_fields = ('email', 'first_name', 'last_name')
	ordering = ('email',)


admin.site.register(dbUser, dbAdmin)
admin.site.register(University, UniversityAdmin)
admin.site.register(Program, ProgramAdmin)
admin.site.register(Department, DepartmentAdmin)
admin.site.register(Course, CourseAdmin)
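
# --- Hedged refactoring sketch (added for illustration; not part of the original source) ---
# ProgramAdmin, DepartmentAdmin, UniversityAdmin and CourseAdmin all derive the model's
# 'code' from a human-readable field inside save_model. One way to express that once is a
# small mixin; the attribute name 'slug_source_field' is an assumption made for this
# sketch, not something the project defines.
class CodeFromFieldMixin(object):
	slug_source_field = 'description'

	def save_model(self, request, obj, form, change):
		if not getattr(obj, 'code', ''):
			obj.code = getattr(obj, self.slug_source_field).replace(' ', '_')
		obj.save()

# Usage would look like: class ProgramAdmin(CodeFromFieldMixin, admin.ModelAdmin): ...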
|
[
"from django.contrib import admin\nfrom django.contrib.auth.admin import UserAdmin\nfrom django.utils.translation import ugettext_lazy as _\nfrom django import forms\nfrom programs.models import *\nfrom programs.forms import CustomUserCreationForm, CustomUserChangeForm\nimport pdb\n\nclass ProgramAdmin(admin.ModelAdmin):\n\n\tlist_display = ('description','get_university')\n\tsearch_fields=('description','department__university__code')\n\tlist_filter = ('department__university',)\n\n\tdef get_university(self,obj):\n\t\treturn obj.department.university\n\n\tdef save_model(self,request,obj,form,change):\n\t\tobj.code = obj.description.replace(' ','_')\n\t\tobj.save()\n\n\tget_university.short_description = 'University'\n\n\tdef change_view(self,request,object_id,extra_content=None):\n\t\tself.exclude = ('',)\n\t\treturn super(ProgramAdmin,self).change_view(request,object_id)\n\n\tdef add_view(self,request,extra_content=None):\n\n\t\tself.exclude = ('code',)\n\t\treturn super(ProgramAdmin,self).add_view(request)\n\nclass ProgramInline(admin.TabularInline):\n\tmodel = Program\n\textra = 0\n\tfields = ('description',)\n\nclass DepartmentAdmin(admin.ModelAdmin):\n\n\tfieldsets = [\n\t(None, {'fields':['description','university','tenured','nonTenured']}),\n\t]\n\tinlines = [ProgramInline]\n\n\tsearch_fields = ('university__description','description')\n\tlist_filter = ('description','university')\n\n\tdef save_model(self,request,obj,form,change):\n\t\tif obj.code == '':\n\t\t\tobj.code = obj.name.replace(' ','_')\n\t\tobj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n\tmodel = Department\n\textra = 0\n\tfields = ('description',)\n\nclass UniversityAdmin(admin.ModelAdmin):\n\n\tinlines = [DepartmentInline]\n\n\tsearch_fields = ('description',)\n\n\tdef save_model(self,request,obj,form,change):\n\t\tobj.code = obj.description.replace(' ','_')\n\t\tobj.save()\n\n\tdef change_view(self,request,object_id,extra_content=None):\n\t\tself.exclude = ('',)\n\t\treturn super(UniversityAdmin,self).change_view(request,object_id)\n\n\tdef add_view(self,request,extra_content=None):\n\n\t\tself.exclude = ('code',)\n\t\treturn super(UniversityAdmin,self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\tclass Meta:\n\t\tModel = Course\n\n\tdef __init__(self,*args,**kwargs):\n\t\tsuper(CourseForm,self).__init__(*args,**kwargs)\n\t\tself.fields['prerequisite'].queryset = Course.objects.exclude(id__exact=self.instance.id)\n\n\tdef clean(self):\n\t\t#Need to handle validation for unique_together\n\n\t\tcleaned_data = self.cleaned_data\n\t\tif self.instance.pk is None:\n\t\t\tif Course.objects.filter(code=cleaned_data['code'],university=cleaned_data['university']).exists():\n\t\t\t\traise forms.ValidationError('The course already exists at this university.')\n\n\t\treturn cleaned_data\n\nclass CourseAdmin(admin.ModelAdmin):\n\tform = CourseForm\n\n\tlist_display = ('code','university',)\n\tlist_filter = ('university',)\n\tsearch_fields = ('code',)\n\n\tdef save_model(self,request,obj,form,change):\n\t\tif obj.code == '':\n\t\t\tobj.code = obj.name.replace(' ','_')\n\n\t\tobj.save()\n\n\nclass dbAdmin(UserAdmin):\n\tfieldsets = (\n\t\t(None, {'fields': ('email', 'password')}),\n\t\t(_('Personal info'), {'fields': ('first_name', 'last_name')}),\n\t\t(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n\t\t\t'groups', 'user_permissions')}),\n\t\t(_('Important dates'), {'fields': ('last_login', 'date_joined')}),\n\t\t)\n\n\tadd_fieldsets = (\n\t\t(None, {\n\t\t\t'classes': 
('wide',),\n\t\t\t'fields': ('email', 'password1', 'password2')}\n\t\t\t),\n\t\t)\n\tform = CustomUserChangeForm\n\tadd_form = CustomUserCreationForm\n\tlist_display = ('email', 'first_name', 'last_name', 'is_staff')\n\tsearch_fields = ('email', 'first_name', 'last_name')\n\tordering = ('email',)\n\nadmin.site.register(dbUser, dbAdmin)\nadmin.site.register(University,UniversityAdmin)\nadmin.site.register(Program,ProgramAdmin)\nadmin.site.register(Department,DepartmentAdmin)\nadmin.site.register(Course,CourseAdmin)\n\n",
"from django.contrib import admin\nfrom django.contrib.auth.admin import UserAdmin\nfrom django.utils.translation import ugettext_lazy as _\nfrom django import forms\nfrom programs.models import *\nfrom programs.forms import CustomUserCreationForm, CustomUserChangeForm\nimport pdb\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n list_display = 'description', 'get_university'\n search_fields = 'description', 'department__university__code'\n list_filter = 'department__university',\n\n def get_university(self, obj):\n return obj.department.university\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n get_university.short_description = 'University'\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(ProgramAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 
'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\nadmin.site.register(dbUser, dbAdmin)\nadmin.site.register(University, UniversityAdmin)\nadmin.site.register(Program, ProgramAdmin)\nadmin.site.register(Department, DepartmentAdmin)\nadmin.site.register(Course, CourseAdmin)\n",
"<import token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n list_display = 'description', 'get_university'\n search_fields = 'description', 'department__university__code'\n list_filter = 'department__university',\n\n def get_university(self, obj):\n return obj.department.university\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n get_university.short_description = 'University'\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(ProgramAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\nadmin.site.register(dbUser, dbAdmin)\nadmin.site.register(University, UniversityAdmin)\nadmin.site.register(Program, ProgramAdmin)\nadmin.site.register(Department, DepartmentAdmin)\nadmin.site.register(Course, 
CourseAdmin)\n",
"<import token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n list_display = 'description', 'get_university'\n search_fields = 'description', 'department__university__code'\n list_filter = 'department__university',\n\n def get_university(self, obj):\n return obj.department.university\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n get_university.short_description = 'University'\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(ProgramAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def get_university(self, obj):\n return obj.department.university\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n <assignment token>\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(ProgramAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def get_university(self, obj):\n return obj.department.university\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n <assignment token>\n <function token>\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def get_university(self, obj):\n return obj.department.university\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n <assignment token>\n <function token>\n <function token>\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n <assignment token>\n <function token>\n <function token>\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <assignment token>\n <function token>\n <function token>\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n\n\nclass ProgramInline(admin.TabularInline):\n <assignment token>\n <assignment token>\n <assignment token>\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass DepartmentInline(admin.TabularInline):\n <assignment token>\n <assignment token>\n <assignment token>\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n <function token>\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <function token>\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n <function token>\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n <function token>\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n <function token>\n <function token>\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CourseAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CourseAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass dbAdmin(UserAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<code token>\n"
] | false |
833 |
58ca520a2f43cef26a95de446f9c7a82819b0b66
|
import urllib.request
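# Fetches data from the Korean public data portal (apis.data.go.kr) with the service key
# below and saves the raw XML response to sample.xml; judging by the endpoint name this is
# the emergency medical facility basic-info inquiry service.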
class GetData:
key = 'fDs8VW%2BvtwQA8Q9LhBW%2BT2ETVBWWJaITjKfpzDsNJO8ugDsvdboInI16ZD295Txxtxwhc4G3PwMAvxd%2FWvz2gQ%3D%3D&pageNo=1&numOfRows=999'
url = "http://apis.data.go.kr/B552657/ErmctInfoInqireService/getEgytBassInfoInqire?serviceKey=" + key
def main(self):
data = urllib.request.urlopen(self.url).read()
print(data)
f = open("sample.xml", "wb")
f.write(data)
f.close()
getData = GetData()
getData.main()
|
[
"import urllib.request\n\nclass GetData:\n key = 'fDs8VW%2BvtwQA8Q9LhBW%2BT2ETVBWWJaITjKfpzDsNJO8ugDsvdboInI16ZD295Txxtxwhc4G3PwMAvxd%2FWvz2gQ%3D%3D&pageNo=1&numOfRows=999'\n url = \"http://apis.data.go.kr/B552657/ErmctInfoInqireService/getEgytBassInfoInqire?serviceKey=\" + key\n\n def main(self):\n data = urllib.request.urlopen(self.url).read()\n print(data)\n f = open(\"sample.xml\", \"wb\")\n f.write(data)\n f.close()\n\ngetData = GetData()\ngetData.main()\n",
"import urllib.request\n\n\nclass GetData:\n key = (\n 'fDs8VW%2BvtwQA8Q9LhBW%2BT2ETVBWWJaITjKfpzDsNJO8ugDsvdboInI16ZD295Txxtxwhc4G3PwMAvxd%2FWvz2gQ%3D%3D&pageNo=1&numOfRows=999'\n )\n url = (\n 'http://apis.data.go.kr/B552657/ErmctInfoInqireService/getEgytBassInfoInqire?serviceKey='\n + key)\n\n def main(self):\n data = urllib.request.urlopen(self.url).read()\n print(data)\n f = open('sample.xml', 'wb')\n f.write(data)\n f.close()\n\n\ngetData = GetData()\ngetData.main()\n",
"<import token>\n\n\nclass GetData:\n key = (\n 'fDs8VW%2BvtwQA8Q9LhBW%2BT2ETVBWWJaITjKfpzDsNJO8ugDsvdboInI16ZD295Txxtxwhc4G3PwMAvxd%2FWvz2gQ%3D%3D&pageNo=1&numOfRows=999'\n )\n url = (\n 'http://apis.data.go.kr/B552657/ErmctInfoInqireService/getEgytBassInfoInqire?serviceKey='\n + key)\n\n def main(self):\n data = urllib.request.urlopen(self.url).read()\n print(data)\n f = open('sample.xml', 'wb')\n f.write(data)\n f.close()\n\n\ngetData = GetData()\ngetData.main()\n",
"<import token>\n\n\nclass GetData:\n key = (\n 'fDs8VW%2BvtwQA8Q9LhBW%2BT2ETVBWWJaITjKfpzDsNJO8ugDsvdboInI16ZD295Txxtxwhc4G3PwMAvxd%2FWvz2gQ%3D%3D&pageNo=1&numOfRows=999'\n )\n url = (\n 'http://apis.data.go.kr/B552657/ErmctInfoInqireService/getEgytBassInfoInqire?serviceKey='\n + key)\n\n def main(self):\n data = urllib.request.urlopen(self.url).read()\n print(data)\n f = open('sample.xml', 'wb')\n f.write(data)\n f.close()\n\n\n<assignment token>\ngetData.main()\n",
"<import token>\n\n\nclass GetData:\n key = (\n 'fDs8VW%2BvtwQA8Q9LhBW%2BT2ETVBWWJaITjKfpzDsNJO8ugDsvdboInI16ZD295Txxtxwhc4G3PwMAvxd%2FWvz2gQ%3D%3D&pageNo=1&numOfRows=999'\n )\n url = (\n 'http://apis.data.go.kr/B552657/ErmctInfoInqireService/getEgytBassInfoInqire?serviceKey='\n + key)\n\n def main(self):\n data = urllib.request.urlopen(self.url).read()\n print(data)\n f = open('sample.xml', 'wb')\n f.write(data)\n f.close()\n\n\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass GetData:\n <assignment token>\n <assignment token>\n\n def main(self):\n data = urllib.request.urlopen(self.url).read()\n print(data)\n f = open('sample.xml', 'wb')\n f.write(data)\n f.close()\n\n\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass GetData:\n <assignment token>\n <assignment token>\n <function token>\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<class token>\n<assignment token>\n<code token>\n"
] | false |
834 |
9535973f9714926269490b8550a67c74d04d8f0a
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
from OpenGL.constant import Constant as _C
# End users want this...
from OpenGL.raw.GLES2 import _errors
# Code generation uses this
from OpenGL.raw.GLES2 import _types as _cs
_EXTENSION_NAME = 'GLES2_NV_viewport_array'
def _f(function):
return _p.createFunction(function, _p.PLATFORM.GLES2, 'GLES2_NV_viewport_array',
error_checker=_errors._error_checker)
GL_DEPTH_RANGE = _C('GL_DEPTH_RANGE', 0x0B70)
GL_MAX_VIEWPORTS_NV = _C('GL_MAX_VIEWPORTS_NV', 0x825B)
GL_SCISSOR_BOX = _C('GL_SCISSOR_BOX', 0x0C10)
GL_SCISSOR_TEST = _C('GL_SCISSOR_TEST', 0x0C11)
GL_VIEWPORT = _C('GL_VIEWPORT', 0x0BA2)
GL_VIEWPORT_BOUNDS_RANGE_NV = _C('GL_VIEWPORT_BOUNDS_RANGE_NV', 0x825D)
GL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV=_C('GL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV',0x825F)
GL_VIEWPORT_SUBPIXEL_BITS_NV=_C('GL_VIEWPORT_SUBPIXEL_BITS_NV',0x825C)
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLfloatArray)
def glDepthRangeArrayfvNV(first,count,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat)
def glDepthRangeIndexedfNV(index,n,f):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLuint)
def glDisableiNV(target,index):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLuint)
def glEnableiNV(target,index):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLuint,arrays.GLfloatArray)
def glGetFloati_vNV(target,index,data):pass
@_f
@_p.types(_cs.GLboolean,_cs.GLenum,_cs.GLuint)
def glIsEnablediNV(target,index):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLintArray)
def glScissorArrayvNV(first,count,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLint,_cs.GLint,_cs.GLsizei,_cs.GLsizei)
def glScissorIndexedNV(index,left,bottom,width,height):pass
@_f
@_p.types(None,_cs.GLuint,arrays.GLintArray)
def glScissorIndexedvNV(index,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLfloatArray)
def glViewportArrayvNV(first,count,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glViewportIndexedfNV(index,x,y,w,h):pass
@_f
@_p.types(None,_cs.GLuint,arrays.GLfloatArray)
def glViewportIndexedfvNV(index,v):pass
|
[
"'''Autogenerated by xml_generate script, do not edit!'''\nfrom OpenGL import platform as _p, arrays\nfrom OpenGL.constant import Constant as _C\n# End users want this...\nfrom OpenGL.raw.GLES2 import _errors\n# Code generation uses this\nfrom OpenGL.raw.GLES2 import _types as _cs\n\n_EXTENSION_NAME = 'GLES2_NV_viewport_array'\n\n\ndef _f(function):\n return _p.createFunction(function, _p.PLATFORM.GLES2, 'GLES2_NV_viewport_array',\n error_checker=_errors._error_checker)\n\n\nGL_DEPTH_RANGE = _C('GL_DEPTH_RANGE', 0x0B70)\nGL_MAX_VIEWPORTS_NV = _C('GL_MAX_VIEWPORTS_NV', 0x825B)\nGL_SCISSOR_BOX = _C('GL_SCISSOR_BOX', 0x0C10)\nGL_SCISSOR_TEST = _C('GL_SCISSOR_TEST', 0x0C11)\nGL_VIEWPORT = _C('GL_VIEWPORT', 0x0BA2)\nGL_VIEWPORT_BOUNDS_RANGE_NV = _C('GL_VIEWPORT_BOUNDS_RANGE_NV', 0x825D)\nGL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV=_C('GL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV',0x825F)\nGL_VIEWPORT_SUBPIXEL_BITS_NV=_C('GL_VIEWPORT_SUBPIXEL_BITS_NV',0x825C)\n@_f\n@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first,count,v):pass\n@_f\n@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat)\ndef glDepthRangeIndexedfNV(index,n,f):pass\n@_f\n@_p.types(None,_cs.GLenum,_cs.GLuint)\ndef glDisableiNV(target,index):pass\n@_f\n@_p.types(None,_cs.GLenum,_cs.GLuint)\ndef glEnableiNV(target,index):pass\n@_f\n@_p.types(None,_cs.GLenum,_cs.GLuint,arrays.GLfloatArray)\ndef glGetFloati_vNV(target,index,data):pass\n@_f\n@_p.types(_cs.GLboolean,_cs.GLenum,_cs.GLuint)\ndef glIsEnablediNV(target,index):pass\n@_f\n@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLintArray)\ndef glScissorArrayvNV(first,count,v):pass\n@_f\n@_p.types(None,_cs.GLuint,_cs.GLint,_cs.GLint,_cs.GLsizei,_cs.GLsizei)\ndef glScissorIndexedNV(index,left,bottom,width,height):pass\n@_f\n@_p.types(None,_cs.GLuint,arrays.GLintArray)\ndef glScissorIndexedvNV(index,v):pass\n@_f\n@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLfloatArray)\ndef glViewportArrayvNV(first,count,v):pass\n@_f\n@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)\ndef glViewportIndexedfNV(index,x,y,w,h):pass\n@_f\n@_p.types(None,_cs.GLuint,arrays.GLfloatArray)\ndef glViewportIndexedfvNV(index,v):pass\n",
"<docstring token>\nfrom OpenGL import platform as _p, arrays\nfrom OpenGL.constant import Constant as _C\nfrom OpenGL.raw.GLES2 import _errors\nfrom OpenGL.raw.GLES2 import _types as _cs\n_EXTENSION_NAME = 'GLES2_NV_viewport_array'\n\n\ndef _f(function):\n return _p.createFunction(function, _p.PLATFORM.GLES2,\n 'GLES2_NV_viewport_array', error_checker=_errors._error_checker)\n\n\nGL_DEPTH_RANGE = _C('GL_DEPTH_RANGE', 2928)\nGL_MAX_VIEWPORTS_NV = _C('GL_MAX_VIEWPORTS_NV', 33371)\nGL_SCISSOR_BOX = _C('GL_SCISSOR_BOX', 3088)\nGL_SCISSOR_TEST = _C('GL_SCISSOR_TEST', 3089)\nGL_VIEWPORT = _C('GL_VIEWPORT', 2978)\nGL_VIEWPORT_BOUNDS_RANGE_NV = _C('GL_VIEWPORT_BOUNDS_RANGE_NV', 33373)\nGL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV = _C(\n 'GL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV', 33375)\nGL_VIEWPORT_SUBPIXEL_BITS_NV = _C('GL_VIEWPORT_SUBPIXEL_BITS_NV', 33372)\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint)\ndef glDisableiNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint)\ndef glEnableiNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat)\ndef glViewportIndexedfNV(index, x, y, w, h):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLfloatArray)\ndef glViewportIndexedfvNV(index, v):\n pass\n",
"<docstring token>\n<import token>\n_EXTENSION_NAME = 'GLES2_NV_viewport_array'\n\n\ndef _f(function):\n return _p.createFunction(function, _p.PLATFORM.GLES2,\n 'GLES2_NV_viewport_array', error_checker=_errors._error_checker)\n\n\nGL_DEPTH_RANGE = _C('GL_DEPTH_RANGE', 2928)\nGL_MAX_VIEWPORTS_NV = _C('GL_MAX_VIEWPORTS_NV', 33371)\nGL_SCISSOR_BOX = _C('GL_SCISSOR_BOX', 3088)\nGL_SCISSOR_TEST = _C('GL_SCISSOR_TEST', 3089)\nGL_VIEWPORT = _C('GL_VIEWPORT', 2978)\nGL_VIEWPORT_BOUNDS_RANGE_NV = _C('GL_VIEWPORT_BOUNDS_RANGE_NV', 33373)\nGL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV = _C(\n 'GL_VIEWPORT_INDEX_PROVOKING_VERTEX_NV', 33375)\nGL_VIEWPORT_SUBPIXEL_BITS_NV = _C('GL_VIEWPORT_SUBPIXEL_BITS_NV', 33372)\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint)\ndef glDisableiNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint)\ndef glEnableiNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat)\ndef glViewportIndexedfNV(index, x, y, w, h):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLfloatArray)\ndef glViewportIndexedfvNV(index, v):\n pass\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef _f(function):\n return _p.createFunction(function, _p.PLATFORM.GLES2,\n 'GLES2_NV_viewport_array', error_checker=_errors._error_checker)\n\n\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint)\ndef glDisableiNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint)\ndef glEnableiNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat)\ndef glViewportIndexedfNV(index, x, y, w, h):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLfloatArray)\ndef glViewportIndexedfvNV(index, v):\n pass\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef _f(function):\n return _p.createFunction(function, _p.PLATFORM.GLES2,\n 'GLES2_NV_viewport_array', error_checker=_errors._error_checker)\n\n\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint)\ndef glDisableiNV(target, index):\n pass\n\n\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat)\ndef glViewportIndexedfNV(index, x, y, w, h):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLfloatArray)\ndef glViewportIndexedfvNV(index, v):\n pass\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef _f(function):\n return _p.createFunction(function, _p.PLATFORM.GLES2,\n 'GLES2_NV_viewport_array', error_checker=_errors._error_checker)\n\n\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat)\ndef glViewportIndexedfNV(index, x, y, w, h):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLfloatArray)\ndef glViewportIndexedfvNV(index, v):\n pass\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef _f(function):\n return _p.createFunction(function, _p.PLATFORM.GLES2,\n 'GLES2_NV_viewport_array', error_checker=_errors._error_checker)\n\n\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat)\ndef glViewportIndexedfNV(index, x, y, w, h):\n pass\n\n\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat, _cs.GLfloat)\ndef glViewportIndexedfNV(index, x, y, w, h):\n pass\n\n\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLenum, _cs.GLuint, arrays.GLfloatArray)\ndef glGetFloati_vNV(target, index, data):\n pass\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n<function token>\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLint, _cs.GLint, _cs.GLsizei, _cs.GLsizei)\ndef glScissorIndexedNV(index, left, bottom, width, height):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glDepthRangeArrayfvNV(first, count, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n<function token>\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n<function token>\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLfloatArray)\ndef glViewportArrayvNV(first, count, v):\n pass\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLfloat, _cs.GLfloat)\ndef glDepthRangeIndexedfNV(index, n, f):\n pass\n\n\n<function token>\n<function token>\n<function token>\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n<function token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n@_f\n@_p.types(None, _cs.GLuint, _cs.GLsizei, arrays.GLintArray)\ndef glScissorArrayvNV(first, count, v):\n pass\n\n\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n<function token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@_f\n@_p.types(_cs.GLboolean, _cs.GLenum, _cs.GLuint)\ndef glIsEnablediNV(target, index):\n pass\n\n\n<function token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n<function token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@_f\n@_p.types(None, _cs.GLuint, arrays.GLintArray)\ndef glScissorIndexedvNV(index, v):\n pass\n\n\n<function token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
835 |
77d7fb49ed4c3e78b148cd446e9a5c6a0e6fac8b
|
#GUIcal.py
from tkinter import *
from tkinter import ttk
import math
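# Simple Tkinter GUI: reads a height and a base length and reports the hypotenuse
# computed with the Pythagorean theorem.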
GUI=Tk()
GUI.title('My Cal Program')
GUI.geometry('500x500')
def calc():
    height = v_height.get()
    base = v_base.get()  # get the value from v_base
    print(f'height is {height}')
    print(f'Basal length is {base}')
    length = math.sqrt(height * height + base * base)  # hypotenuse via the Pythagorean theorem
    print('Length is {:.2f}'.format(length))
    v_result.set('Length is {:.2f}'.format(length))  # also show the result in the GUI label
###For attach picture
'''
IMG=PhotoImage(file='pythagorus-theorem.png').subsample(3)
IM1=Label(GUI,image=IMG)
IM1.pack()
'''
v_height=IntVar()
v_base=IntVar()
L1=Label(text='Please input height',foreground='red',font=('Angsana New',15))
L1.pack()
E1=ttk.Entry(GUI,textvariable=v_height)
E1.pack(pady=8,ipady=7,ipadx=17)
L2=Label(text='Please input basal length',foreground='red',font=('Angsana New',15))
L2.pack()
E2=ttk.Entry(GUI,textvariable=v_base)
E2.pack(pady=8,ipady=7,ipadx=17)
B1=ttk.Button(text='Calculate',command=calc)
B1.pack()
v_result=StringVar()
v_result.set('----Result----')
Result=ttk.Label(GUI,textvariable=v_result,foreground='green',font=('Angsana New',15))
Result.pack()
GUI.mainloop()
|
[
"#GUIcal.py\r\nfrom tkinter import *\r\nfrom tkinter import ttk\r\nimport math\r\n\r\nGUI=Tk()\r\nGUI.title('My Cal Program')\r\nGUI.geometry('500x500')\r\n\r\ndef calc():\r\n\theight=v_height.get()\r\n\tbase=v_base.get()#ดึงค่ามาจากv_base\r\n\tprint(f'height is {height}')\r\n\tprint(f'Basal length is {base}')\r\n\tlength= math.isqrt((height*height)+(base*base))\r\n\tprint('Lenght is {:.2f}'.format(length))\r\n\t\r\n###For attach picture\r\n'''\r\nIMG=PhotoImage(file='pythagorus-theorem.png').subsample(3)\r\nIM1=Label(GUI,image=IMG)\r\nIM1.pack()\r\n'''\r\nv_height=IntVar()\r\nv_base=IntVar()\r\n\r\nL1=Label(text='Please input height',foreground='red',font=('Angsana New',15))\r\nL1.pack()\r\nE1=ttk.Entry(GUI,textvariable=v_height)\r\nE1.pack(pady=8,ipady=7,ipadx=17)\r\n\r\n\r\nL2=Label(text='Please input basal length',foreground='red',font=('Angsana New',15))\r\nL2.pack()\r\nE2=ttk.Entry(GUI,textvariable=v_base)\r\nE2.pack(pady=8,ipady=7,ipadx=17)\r\n\r\n\r\nB1=ttk.Button(text='Calculate',command=calc)\r\nB1.pack()\r\n\r\nv_result=StringVar()\r\nv_result.set('----Result----')\r\nResult=ttk.Label(GUI,textvariable=v_result,foreground='green',font=('Angsana New',15))\r\nResult.pack()\r\n\r\nGUI.mainloop()\r\n",
"from tkinter import *\nfrom tkinter import ttk\nimport math\nGUI = Tk()\nGUI.title('My Cal Program')\nGUI.geometry('500x500')\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<docstring token>\nv_height = IntVar()\nv_base = IntVar()\nL1 = Label(text='Please input height', foreground='red', font=(\n 'Angsana New', 15))\nL1.pack()\nE1 = ttk.Entry(GUI, textvariable=v_height)\nE1.pack(pady=8, ipady=7, ipadx=17)\nL2 = Label(text='Please input basal length', foreground='red', font=(\n 'Angsana New', 15))\nL2.pack()\nE2 = ttk.Entry(GUI, textvariable=v_base)\nE2.pack(pady=8, ipady=7, ipadx=17)\nB1 = ttk.Button(text='Calculate', command=calc)\nB1.pack()\nv_result = StringVar()\nv_result.set('----Result----')\nResult = ttk.Label(GUI, textvariable=v_result, foreground='green', font=(\n 'Angsana New', 15))\nResult.pack()\nGUI.mainloop()\n",
"<import token>\nGUI = Tk()\nGUI.title('My Cal Program')\nGUI.geometry('500x500')\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<docstring token>\nv_height = IntVar()\nv_base = IntVar()\nL1 = Label(text='Please input height', foreground='red', font=(\n 'Angsana New', 15))\nL1.pack()\nE1 = ttk.Entry(GUI, textvariable=v_height)\nE1.pack(pady=8, ipady=7, ipadx=17)\nL2 = Label(text='Please input basal length', foreground='red', font=(\n 'Angsana New', 15))\nL2.pack()\nE2 = ttk.Entry(GUI, textvariable=v_base)\nE2.pack(pady=8, ipady=7, ipadx=17)\nB1 = ttk.Button(text='Calculate', command=calc)\nB1.pack()\nv_result = StringVar()\nv_result.set('----Result----')\nResult = ttk.Label(GUI, textvariable=v_result, foreground='green', font=(\n 'Angsana New', 15))\nResult.pack()\nGUI.mainloop()\n",
"<import token>\n<assignment token>\nGUI.title('My Cal Program')\nGUI.geometry('500x500')\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<docstring token>\n<assignment token>\nL1.pack()\n<assignment token>\nE1.pack(pady=8, ipady=7, ipadx=17)\n<assignment token>\nL2.pack()\n<assignment token>\nE2.pack(pady=8, ipady=7, ipadx=17)\n<assignment token>\nB1.pack()\n<assignment token>\nv_result.set('----Result----')\n<assignment token>\nResult.pack()\nGUI.mainloop()\n",
"<import token>\n<assignment token>\n<code token>\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<function token>\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
836 |
63069f03d17862b8ea6aa74d0acd1370bbea0dcb
|
import os
import xml.etree.ElementTree as Et
import copy
from .common import CommonRouteExchangeService
class DataRoutes(CommonRouteExchangeService):
"""Класс для работы с данными аршрутов"""
def get_route_from_file(self, path_route):
"""Считывание маршрута из файла
:param path_route: Путь до маршрута в формате XML
:return: ElementTree
"""
path_file = os.path.join(os.getcwd(), path_route)
return Et.parse(path_file)
def change_uvid_in_route(self, tree_route, uvid):
"""Замена UVID в маршруте
:param tree_route: Маршрут в формате XML
:param uvid: UVID
:return: ElementTree
"""
tree_route_copy = copy.deepcopy(tree_route)
root = tree_route_copy.getroot()
root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})
return tree_route_copy
def change_status_in_route(self, tree_route, status):
"""Замена статуса маршрута в маршруте
:param tree_route: Маршрут в формате XML
:param status: Cтатус маршрута 1 - ORIGINAL
2 - PLANNED_FOR_VOYAGE
3 - OPTIMIZED
4 - CROSS_CHECKED
5 - SAFETY_CHECKED
6 - APPROVED
7 - USED_FOR_MONITORING
8 - INACTIVE
:return: ElementTree
"""
tree_route_copy = copy.deepcopy(tree_route)
root = tree_route_copy.getroot()
root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(status)})
return tree_route_copy
def change_route_name_in_route(self, tree_route, route_name):
"""Замена routeName в маршруте
:param tree_route: Маршрут в формате XML
:param route_name: Имя маршрута
:return: ElementTree
"""
tree_route_copy = copy.deepcopy(tree_route)
root = tree_route_copy.getroot()
root.find('.//*[@routeName]').attrib.update({'routeName': route_name})
return tree_route_copy
def convert_route_to_str(self, tree_route):
return Et.tostring(tree_route.getroot(), encoding='UTF-8')
|
[
"import os\nimport xml.etree.ElementTree as Et\nimport copy\n\nfrom .common import CommonRouteExchangeService\n\n\nclass DataRoutes(CommonRouteExchangeService):\n \"\"\"Класс для работы с данными аршрутов\"\"\"\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(status)})\n return tree_route_copy\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')",
"import os\nimport xml.etree.ElementTree as Et\nimport copy\nfrom .common import CommonRouteExchangeService\n\n\nclass DataRoutes(CommonRouteExchangeService):\n \"\"\"Класс для работы с данными аршрутов\"\"\"\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(\n status)})\n return tree_route_copy\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"<import token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n \"\"\"Класс для работы с данными аршрутов\"\"\"\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(\n status)})\n return tree_route_copy\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"<import token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <docstring token>\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(\n status)})\n return tree_route_copy\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"<import token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <docstring token>\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n <function token>\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"<import token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <docstring token>\n <function token>\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n <function token>\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"<import token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <docstring token>\n <function token>\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n <function token>\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n <function token>\n",
"<import token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <docstring token>\n <function token>\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
837 |
41eef711c79fb084c9780b6d2638d863266e569d
|
import random
responses = ['Seems so','Never','Untrue','Always no matter what','You decide your fate','Not sure','Yep','Nope','Maybe','Nein','Oui','Ask the person next to you','That question is not for me']
def answer():
question = input('Ask me anything: ')
print(random.choice(responses))
answer()
secondQuestion = (input('Another question? Yes/No: '))
while secondQuestion == str('Yes'):
answer()
secondQuestion = (input('Another question? Yes/No: '))
else:
print('Thank you for asking the wise magic 8 ball')
|
[
"import random\nresponses = ['Seems so','Never','Untrue','Always no matter what','You decide your fate','Not sure','Yep','Nope','Maybe','Nein','Qui','Ask the person next to you','That question is not for me']\n\ndef answer():\n question = input('Ask me anything: ')\n print(random.choice(responses))\nanswer()\n\nsecondQuestion = (input('Another question? Yes/No: '))\nwhile secondQuestion == str('Yes'):\n answer()\n secondQuestion = (input('Another question? Yes/No: '))\n \nelse: \n print('Thank you for asking the wise magic 8 ball')\n",
"import random\nresponses = ['Seems so', 'Never', 'Untrue', 'Always no matter what',\n 'You decide your fate', 'Not sure', 'Yep', 'Nope', 'Maybe', 'Nein',\n 'Qui', 'Ask the person next to you', 'That question is not for me']\n\n\ndef answer():\n question = input('Ask me anything: ')\n print(random.choice(responses))\n\n\nanswer()\nsecondQuestion = input('Another question? Yes/No: ')\nwhile secondQuestion == str('Yes'):\n answer()\n secondQuestion = input('Another question? Yes/No: ')\nelse:\n print('Thank you for asking the wise magic 8 ball')\n",
"<import token>\nresponses = ['Seems so', 'Never', 'Untrue', 'Always no matter what',\n 'You decide your fate', 'Not sure', 'Yep', 'Nope', 'Maybe', 'Nein',\n 'Qui', 'Ask the person next to you', 'That question is not for me']\n\n\ndef answer():\n question = input('Ask me anything: ')\n print(random.choice(responses))\n\n\nanswer()\nsecondQuestion = input('Another question? Yes/No: ')\nwhile secondQuestion == str('Yes'):\n answer()\n secondQuestion = input('Another question? Yes/No: ')\nelse:\n print('Thank you for asking the wise magic 8 ball')\n",
"<import token>\n<assignment token>\n\n\ndef answer():\n question = input('Ask me anything: ')\n print(random.choice(responses))\n\n\nanswer()\n<assignment token>\nwhile secondQuestion == str('Yes'):\n answer()\n secondQuestion = input('Another question? Yes/No: ')\nelse:\n print('Thank you for asking the wise magic 8 ball')\n",
"<import token>\n<assignment token>\n\n\ndef answer():\n question = input('Ask me anything: ')\n print(random.choice(responses))\n\n\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
838 |
f4bc5663ab2b2a6dbb41a2fc3d7ca67100b455a4
|
# Compute grid scores using the new dataset format
import matplotlib
import os
# allow code to work on machines without a display or in a screen session
display = os.environ.get('DISPLAY')
if display is None or 'localhost' in display:
matplotlib.use('agg')
import argparse
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from datasets import train_test_loaders, angular_train_test_loaders, tf_train_test_loaders, load_from_cache
from models import SSPPathIntegrationModel
from datetime import datetime
from tensorboardX import SummaryWriter
import json
from spatial_semantic_pointers.utils import get_heatmap_vectors, ssp_to_loc, ssp_to_loc_v
from spatial_semantic_pointers.plots import plot_predictions, plot_predictions_v
import matplotlib.pyplot as plt
from path_integration_utils import pc_to_loc_v, encoding_func_from_model, pc_gauss_encoding_func, ssp_encoding_func, \
hd_gauss_encoding_func, hex_trig_encoding_func
from ssp_navigation.utils.encodings import get_encoding_function
import grid_scoring.scores as scores
import grid_scoring.utils as utils
# from grid_scoring.run_network import run_and_gather_activations, run_and_gather_localization_activations
from path_integration_utils import encoding_func_from_model, pc_gauss_encoding_func
parser = argparse.ArgumentParser('Compute grid scores for a path integration model')
parser.add_argument('--n-samples', type=int, default=5000)
parser.add_argument('--use-localization', action='store_true')
# TODO: use these parameters
parser.add_argument('--dataset', type=str, default='')
parser.add_argument('--model', type=str, default='')
parser.add_argument('--fname-prefix', type=str, default='sac')
parser.add_argument('--spatial-encoding', type=str, default='ssp',
choices=[
'ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp', 'orth-proj-ssp',
'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp', 'sub-toroid-ssp', 'var-sub-toroid-ssp',
'random', '2d', '2d-normalized', 'one-hot', 'hex-trig',
'trig', 'random-trig', 'random-rotated-trig', 'random-proj', 'legendre',
'learned', 'learned-normalized', 'frozen-learned', 'frozen-learned-normalized',
'pc-gauss', 'pc-dog', 'tile-coding'
])
# choices=['ssp', '2d', 'frozen-learned', 'pc-gauss', 'pc-dog', 'pc-gauss-softmax', 'hex-trig', 'hex-trig-all-freq'])
parser.add_argument('--frozen-model', type=str, default='', help='model to use frozen encoding weights from')
parser.add_argument('--pc-gauss-sigma', type=float, default=0.25)
parser.add_argument('--pc-diff-sigma', type=float, default=0.5)
parser.add_argument('--hex-freq-coef', type=float, default=2.5, help='constant to scale frequencies by')
parser.add_argument('--n-tiles', type=int, default=8, help='number of layers for tile coding')
parser.add_argument('--n-bins', type=int, default=8, help='number of bins for tile coding')
parser.add_argument('--ssp-scaling', type=float, default=1.0)
parser.add_argument('--grid-ssp-min', type=float, default=0.25, help='minimum plane wave scale')
parser.add_argument('--grid-ssp-max', type=float, default=2.0, help='maximum plane wave scale')
parser.add_argument('--phi', type=float, default=0.5, help='phi as a fraction of pi for orth-proj-ssp')
parser.add_argument('--n-proj', type=int, default=3, help='projection dimension for sub toroids')
parser.add_argument('--scale-ratio', type=float, default=0, help='ratio between sub toroid scales')
parser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1, 2, 3],
help='pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 3: hex grid')
parser.add_argument('--seed', type=int, default=13)
parser.add_argument('--dropout-p', type=float, default=0.5)
parser.add_argument('--dim', type=int, default=512)
parser.add_argument('--train-split', type=float, default=0.8, help='Training fraction of the train/test split')
parser.add_argument('--allow-cache', action='store_true',
help='once the dataset has been generated, it will be saved to a file to be loaded faster')
parser.add_argument('--trajectory-length', type=int, default=100)
parser.add_argument('--minibatch-size', type=int, default=10)
parser.add_argument('--n-image-bins', type=int, default=20)
parser.add_argument('--n-hd-cells', type=int, default=0, help='If non-zero, use linear and angular velocity as well as HD cell output')
parser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],
help='Use the sin and cos of the angular velocity if angular velocities are used')
parser.add_argument('--use-lmu', action='store_true')
parser.add_argument('--lmu-order', type=int, default=6)
parser.add_argument('--no-cache-load', action='store_true', help='do not load from cache')
args = parser.parse_args()
ssp_scaling = args.ssp_scaling
torch.manual_seed(args.seed)
np.random.seed(args.seed)
data = np.load(args.dataset)
# only used for frozen-learned and other custom encoding functions
# encoding_func = None
limit_low = 0 #* args.ssp_scaling
limit_high = 2.2 #* args.ssp_scaling
res = 128 #256
encoding_func, dim = get_encoding_function(args, limit_low=limit_low, limit_high=limit_high)
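# encoding_func maps a 2D coordinate (x, y) to a dim-dimensional vector according to the
# chosen --spatial-encoding (SSP variants, place-cell codes, tile coding, etc.)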
xs = np.linspace(limit_low, limit_high, res)
ys = np.linspace(limit_low, limit_high, res)
# FIXME: inefficient but will work for now
heatmap_vectors = np.zeros((len(xs), len(ys), dim))
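# Each heatmap vector is the (normalized) encoding of one (x, y) grid point; ssp_to_loc_v
# later decodes predicted SSPs back to coordinates by comparing them against these vectors.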
print("Generating Heatmap Vectors")
for i, x in enumerate(xs):
for j, y in enumerate(ys):
heatmap_vectors[i, j, :] = encoding_func(
# batch dim
# np.array(
# [[x, y]]
# )
# no batch dim
# np.array(
# [x, y]
# )
# new signature
x=x, y=y
)
heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])
print("Heatmap Vector Generation Complete")
n_samples = args.n_samples
rollout_length = args.trajectory_length
batch_size = args.minibatch_size
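# With head-direction cells enabled (--n-hd-cells > 0) the model input also carries angular
# velocity (optionally as sin/cos) and its output is extended by n_hd_cells HD-cell values.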
if args.n_hd_cells > 0:
hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=0.25, use_softmax=False, rng=np.random.RandomState(args.seed))
if args.sin_cos_ang:
input_size = 3
else:
input_size = 2
model = SSPPathIntegrationModel(
input_size=input_size, unroll_length=rollout_length,
sp_dim=dim + args.n_hd_cells, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order
)
else:
hd_encoding_func = None
model = SSPPathIntegrationModel(
input_size=2, unroll_length=rollout_length,
sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order
)
# model = SSPPathIntegrationModel(unroll_length=rollout_length, sp_dim=dim, dropout_p=args.dropout_p)
model.load_state_dict(torch.load(args.model), strict=False)
model.eval()
# encoding specific cache string
encoding_specific = ''
if 'ssp' in args.spatial_encoding:
encoding_specific = args.ssp_scaling
elif args.spatial_encoding == 'frozen-learned':
encoding_specific = args.frozen_model
elif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':
encoding_specific = args.pc_gauss_sigma
elif args.spatial_encoding == 'pc-dog':
encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)
elif args.spatial_encoding == 'hex-trig':
encoding_specific = args.hex_freq_coef
if 'tf' in args.dataset:
cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(
args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, encoding_specific
)
else:
cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(
args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, encoding_specific
)
# if the file exists, load it from cache
if os.path.exists(cache_fname) and not args.no_cache_load:
print("Generating Train and Test Loaders from Cache")
trainloader, testloader = load_from_cache(cache_fname, batch_size=batch_size, n_samples=n_samples)
else:
print("Generating Train and Test Loaders")
if 'tf' in args.dataset:
# tfrecord dataset only supports using the sin and cos of angular velocity
assert args.sin_cos_ang == 1
trainloader, testloader = tf_train_test_loaders(
data,
n_train_samples=n_samples,
n_test_samples=n_samples,
rollout_length=rollout_length,
batch_size=batch_size,
encoding=args.spatial_encoding,
encoding_func=encoding_func,
encoding_dim=args.dim,
train_split=args.train_split,
hd_dim=args.n_hd_cells,
hd_encoding_func=hd_encoding_func,
sin_cos_ang=args.sin_cos_ang,
)
else:
if args.n_hd_cells > 0:
trainloader, testloader = angular_train_test_loaders(
data,
n_train_samples=n_samples,
n_test_samples=n_samples,
rollout_length=rollout_length,
batch_size=batch_size,
encoding=args.spatial_encoding,
encoding_func=encoding_func,
encoding_dim=args.dim,
train_split=args.train_split,
hd_dim=args.n_hd_cells,
hd_encoding_func=hd_encoding_func,
sin_cos_ang=args.sin_cos_ang,
)
else:
trainloader, testloader = train_test_loaders(
data,
n_train_samples=n_samples,
n_test_samples=n_samples,
rollout_length=rollout_length,
batch_size=batch_size,
encoding=args.spatial_encoding,
encoding_func=encoding_func,
encoding_dim=args.dim,
train_split=args.train_split,
)
if args.allow_cache:
if not os.path.exists('dataset_cache'):
os.makedirs('dataset_cache')
np.savez(
cache_fname,
train_velocity_inputs=trainloader.dataset.velocity_inputs,
train_ssp_inputs=trainloader.dataset.ssp_inputs,
train_ssp_outputs=trainloader.dataset.ssp_outputs,
test_velocity_inputs=testloader.dataset.velocity_inputs,
test_ssp_inputs=testloader.dataset.ssp_inputs,
test_ssp_outputs=testloader.dataset.ssp_outputs,
)
print("Train and Test Loaders Generation Complete")
starts = [0.2] * 10
ends = np.linspace(0.4, 1.0, num=10)
masks_parameters = zip(starts, ends.tolist())
latest_epoch_scorer = scores.GridScorer(
nbins=args.n_image_bins,
coords_range=((0, 2.2), (0, 2.2)), # data_reader.get_coord_range(),
mask_parameters=masks_parameters,
)
fname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix, args.n_samples)
fname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix, args.n_samples)
fname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix, args.n_samples)
fname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix, args.n_samples)
# Run and gather activations
print("Testing")
with torch.no_grad():
# Everything is in one batch, so this loop will only happen once
for i, data in enumerate(testloader):
velocity_inputs, ssp_inputs, ssp_outputs = data
ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(velocity_inputs, ssp_inputs)
predictions = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))
coords = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))
lstm_activations = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], model.lstm_hidden_size))
dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], model.linear_hidden_size))
assert rollout_length == ssp_pred.shape[0]
# # For each neuron, contains the average activity at each spatial bin
# # Computing for both ground truth and predicted location
# rate_maps_pred = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))
# rate_maps_truth = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))
print("Computing predicted locations and true locations")
# Using all data, one chunk at a time
for ri in range(rollout_length):
            # trim out head direction info, if it was included, by only looking at the first args.dim entries
# computing 'predicted' coordinates, where the agent thinks it is
pred = ssp_pred.detach().numpy()[ri, :, :args.dim]
# pred = pred / pred.sum(axis=1)[:, np.newaxis]
predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(
pred,
heatmap_vectors, xs, ys
)
# computing 'ground truth' coordinates, where the agent should be
coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]
# coord = coord / coord.sum(axis=1)[:, np.newaxis]
coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(
coord,
heatmap_vectors, xs, ys
)
# reshaping activations and converting to numpy array
lstm_activations[ri*ssp_pred.shape[1]:(ri+1)*ssp_pred.shape[1], :] = lstm_outputs.detach().numpy()[ri, :, :]
dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = dense_outputs.detach().numpy()[ri, :, :]
# predictions = predictions / args.ssp_scaling
# coords = coords / args.ssp_scaling
print(np.max(predictions))
print(np.min(predictions))
grid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred, grid_scores_90_separation_pred = utils.get_scores_and_plot(
scorer=latest_epoch_scorer,
data_abs_xy=predictions, #res['pos_xy'],
activations=lstm_activations, #res['bottleneck'],
directory='output_grid_scores', #FLAGS.saver_results_directory,
filename=fname_lstm_pred,
)
grid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth = utils.get_scores_and_plot(
scorer=latest_epoch_scorer,
data_abs_xy=coords, #res['pos_xy'],
activations=lstm_activations, #res['bottleneck'],
directory='output_grid_scores', #FLAGS.saver_results_directory,
filename=fname_lstm_truth,
)
grid_scores_60_dense_pred, grid_scores_90_dense_pred, grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred = utils.get_scores_and_plot(
scorer=latest_epoch_scorer,
data_abs_xy=predictions, #res['pos_xy'],
activations=dense_activations, #res['bottleneck'],
directory='output_grid_scores', #FLAGS.saver_results_directory,
filename=fname_dense_pred,
)
grid_scores_60_dense_truth, grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth, grid_scores_90_separation_dense_truth = utils.get_scores_and_plot(
scorer=latest_epoch_scorer,
data_abs_xy=coords, #res['pos_xy'],
activations=dense_activations, #res['bottleneck'],
directory='output_grid_scores', #FLAGS.saver_results_directory,
filename=fname_dense_truth,
)
print(grid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth)
# Saving to make grid score values easy to compare for different variations
fname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix, args.n_samples)
np.savez(
fname,
grid_scores_60_pred=grid_scores_60_pred,
grid_scores_90_pred=grid_scores_90_pred,
grid_scores_60_separation_pred=grid_scores_60_separation_pred,
grid_scores_90_separation_pred=grid_scores_90_separation_pred,
grid_scores_60_truth=grid_scores_60_truth,
grid_scores_90_truth=grid_scores_90_truth,
grid_scores_60_separation_truth=grid_scores_60_separation_truth,
grid_scores_90_separation_truth=grid_scores_90_separation_truth,
grid_scores_60_dense_pred=grid_scores_60_dense_pred,
grid_scores_90_dense_pred=grid_scores_90_dense_pred,
grid_scores_60_separation_dense_pred=grid_scores_60_separation_dense_pred,
grid_scores_90_separation_dense_pred=grid_scores_90_separation_dense_pred,
grid_scores_60_dense_truth=grid_scores_60_dense_truth,
grid_scores_90_dense_truth=grid_scores_90_dense_truth,
grid_scores_60_separation_dense_truth=grid_scores_60_separation_dense_truth,
grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth,
)
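For quick comparison across runs, the saved archive can be inspected afterwards. A minimal sketch follows; it assumes the defaults --fname-prefix sac and --n-samples 5000 were used above, so the file name is only an example.
import numpy as np

# Example file name only; it follows the 'output_grid_scores/{prefix}_{n}samples.npz'
# pattern used above with the assumed default arguments.
scores_npz = np.load('output_grid_scores/sac_5000samples.npz')
for key in sorted(scores_npz.files):
    values = scores_npz[key]
    # one score per recorded unit; print a simple summary for each saved array
    print('{}: n={}, mean={:.3f}, max={:.3f}'.format(
        key, len(values), np.nanmean(values), np.nanmax(values)))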
|
[
"# Compute grid scores using the new dataset format\n\nimport matplotlib\nimport os\n# allow code to work on machines without a display or in a screen session\ndisplay = os.environ.get('DISPLAY')\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\n\nimport argparse\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom datasets import train_test_loaders, angular_train_test_loaders, tf_train_test_loaders, load_from_cache\nfrom models import SSPPathIntegrationModel\nfrom datetime import datetime\nfrom tensorboardX import SummaryWriter\nimport json\nfrom spatial_semantic_pointers.utils import get_heatmap_vectors, ssp_to_loc, ssp_to_loc_v\nfrom spatial_semantic_pointers.plots import plot_predictions, plot_predictions_v\nimport matplotlib.pyplot as plt\nfrom path_integration_utils import pc_to_loc_v, encoding_func_from_model, pc_gauss_encoding_func, ssp_encoding_func, \\\n hd_gauss_encoding_func, hex_trig_encoding_func\nfrom ssp_navigation.utils.encodings import get_encoding_function\n\nimport grid_scoring.scores as scores\nimport grid_scoring.utils as utils\n# from grid_scoring.run_network import run_and_gather_activations, run_and_gather_localization_activations\nfrom path_integration_utils import encoding_func_from_model, pc_gauss_encoding_func\n\n\nparser = argparse.ArgumentParser('Compute grid scores for a path integration model')\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\n# TODO: use these parameters\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\n\nparser.add_argument('--spatial-encoding', type=str, default='ssp',\n choices=[\n 'ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp', 'orth-proj-ssp',\n 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp', 'sub-toroid-ssp', 'var-sub-toroid-ssp',\n 'random', '2d', '2d-normalized', 'one-hot', 'hex-trig',\n 'trig', 'random-trig', 'random-rotated-trig', 'random-proj', 'legendre',\n 'learned', 'learned-normalized', 'frozen-learned', 'frozen-learned-normalized',\n 'pc-gauss', 'pc-dog', 'tile-coding'\n ])\n # choices=['ssp', '2d', 'frozen-learned', 'pc-gauss', 'pc-dog', 'pc-gauss-softmax', 'hex-trig', 'hex-trig-all-freq'])\nparser.add_argument('--frozen-model', type=str, default='', help='model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help='constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help='number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help='number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help='minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help='maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help='phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, default=3, help='projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help='ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, 
default=1, choices=[0, 1, 2, 3],\n help='pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 3: hex grid')\n\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help='Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true',\n help='once the dataset has been generated, it will be saved to a file to be loaded faster')\n\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\n\nparser.add_argument('--n-image-bins', type=int, default=20)\n\nparser.add_argument('--n-hd-cells', type=int, default=0, help='If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help='Use the sin and cos of the angular velocity if angular velocities are used')\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\n\nparser.add_argument('--no-cache-load', action='store_true', help='do not load from cache')\n\nargs = parser.parse_args()\n\nssp_scaling = args.ssp_scaling\n\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\n\ndata = np.load(args.dataset)\n\n# only used for frozen-learned and other custom encoding functions\n# encoding_func = None\n\nlimit_low = 0 #* args.ssp_scaling\nlimit_high = 2.2 #* args.ssp_scaling\nres = 128 #256\n\nencoding_func, dim = get_encoding_function(args, limit_low=limit_low, limit_high=limit_high)\n\nxs = np.linspace(limit_low, limit_high, res)\nys = np.linspace(limit_low, limit_high, res)\n\n# FIXME: inefficient but will work for now\nheatmap_vectors = np.zeros((len(xs), len(ys), dim))\n\nprint(\"Generating Heatmap Vectors\")\n\nfor i, x in enumerate(xs):\n for j, y in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(\n # batch dim\n # np.array(\n # [[x, y]]\n # )\n # no batch dim\n # np.array(\n # [x, y]\n # )\n # new signature\n x=x, y=y\n )\n\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\n\nprint(\"Heatmap Vector Generation Complete\")\n\nn_samples = args.n_samples\nrollout_length = args.trajectory_length\nbatch_size = args.minibatch_size\n\n\nif args.n_hd_cells > 0:\n hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = SSPPathIntegrationModel(\n input_size=input_size, unroll_length=rollout_length,\n sp_dim=dim + args.n_hd_cells, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order\n )\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(\n input_size=2, unroll_length=rollout_length,\n sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.use_lmu, order=args.lmu_order\n )\n\n\n# model = SSPPathIntegrationModel(unroll_length=rollout_length, sp_dim=dim, dropout_p=args.dropout_p)\n\nmodel.load_state_dict(torch.load(args.model), strict=False)\n\nmodel.eval()\n\n# encoding specific cache string\nencoding_specific = ''\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = 
args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\n\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(\n args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, encoding_specific\n )\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(\n args.spatial_encoding, args.dim, args.seed, args.n_samples, args.n_hd_cells, encoding_specific\n )\n\n# if the file exists, load it from cache\nif os.path.exists(cache_fname) and not args.no_cache_load:\n print(\"Generating Train and Test Loaders from Cache\")\n trainloader, testloader = load_from_cache(cache_fname, batch_size=batch_size, n_samples=n_samples)\nelse:\n print(\"Generating Train and Test Loaders\")\n\n if 'tf' in args.dataset:\n # tfrecord dataset only supports using the sin and cos of angular velocity\n assert args.sin_cos_ang == 1\n\n trainloader, testloader = tf_train_test_loaders(\n data,\n n_train_samples=n_samples,\n n_test_samples=n_samples,\n rollout_length=rollout_length,\n batch_size=batch_size,\n encoding=args.spatial_encoding,\n encoding_func=encoding_func,\n encoding_dim=args.dim,\n train_split=args.train_split,\n hd_dim=args.n_hd_cells,\n hd_encoding_func=hd_encoding_func,\n sin_cos_ang=args.sin_cos_ang,\n )\n\n else:\n\n if args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(\n data,\n n_train_samples=n_samples,\n n_test_samples=n_samples,\n rollout_length=rollout_length,\n batch_size=batch_size,\n encoding=args.spatial_encoding,\n encoding_func=encoding_func,\n encoding_dim=args.dim,\n train_split=args.train_split,\n hd_dim=args.n_hd_cells,\n hd_encoding_func=hd_encoding_func,\n sin_cos_ang=args.sin_cos_ang,\n )\n else:\n trainloader, testloader = train_test_loaders(\n data,\n n_train_samples=n_samples,\n n_test_samples=n_samples,\n rollout_length=rollout_length,\n batch_size=batch_size,\n encoding=args.spatial_encoding,\n encoding_func=encoding_func,\n encoding_dim=args.dim,\n train_split=args.train_split,\n )\n\n if args.allow_cache:\n\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n\n np.savez(\n cache_fname,\n train_velocity_inputs=trainloader.dataset.velocity_inputs,\n train_ssp_inputs=trainloader.dataset.ssp_inputs,\n train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n test_ssp_inputs=testloader.dataset.ssp_inputs,\n test_ssp_outputs=testloader.dataset.ssp_outputs,\n )\n\nprint(\"Train and Test Loaders Generation Complete\")\n\nstarts = [0.2] * 10\nends = np.linspace(0.4, 1.0, num=10)\nmasks_parameters = zip(starts, ends.tolist())\nlatest_epoch_scorer = scores.GridScorer(\n nbins=args.n_image_bins,\n coords_range=((0, 2.2), (0, 2.2)), # data_reader.get_coord_range(),\n mask_parameters=masks_parameters,\n)\n\n\nfname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix, args.n_samples)\nfname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix, args.n_samples)\nfname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix, args.n_samples)\nfname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix, args.n_samples)\n\n# Run and gather activations\n\nprint(\"Testing\")\nwith torch.no_grad():\n # Everything is in one batch, so this loop will only happen once\n for i, data in enumerate(testloader):\n 
velocity_inputs, ssp_inputs, ssp_outputs = data\n\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(velocity_inputs, ssp_inputs)\n\n predictions = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0]*ssp_pred.shape[1], model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], model.linear_hidden_size))\n\n assert rollout_length == ssp_pred.shape[0]\n\n # # For each neuron, contains the average activity at each spatial bin\n # # Computing for both ground truth and predicted location\n # rate_maps_pred = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))\n # rate_maps_truth = np.zeros((model.lstm_hidden_size, len(xs), len(ys)))\n\n print(\"Computing predicted locations and true locations\")\n # Using all data, one chunk at a time\n for ri in range(rollout_length):\n\n # trim out head direction info if that was included by only looking up to args.encoding_dim\n\n # computing 'predicted' coordinates, where the agent thinks it is\n pred = ssp_pred.detach().numpy()[ri, :, :args.dim]\n # pred = pred / pred.sum(axis=1)[:, np.newaxis]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(\n pred,\n heatmap_vectors, xs, ys\n )\n\n # computing 'ground truth' coordinates, where the agent should be\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n # coord = coord / coord.sum(axis=1)[:, np.newaxis]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = ssp_to_loc_v(\n coord,\n heatmap_vectors, xs, ys\n )\n\n # reshaping activations and converting to numpy array\n lstm_activations[ri*ssp_pred.shape[1]:(ri+1)*ssp_pred.shape[1], :] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :] = dense_outputs.detach().numpy()[ri, :, :]\n\n# predictions = predictions / args.ssp_scaling\n# coords = coords / args.ssp_scaling\n\nprint(np.max(predictions))\nprint(np.min(predictions))\n\ngrid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred, grid_scores_90_separation_pred = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n data_abs_xy=predictions, #res['pos_xy'],\n activations=lstm_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n filename=fname_lstm_pred,\n)\n\ngrid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n data_abs_xy=coords, #res['pos_xy'],\n activations=lstm_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n filename=fname_lstm_truth,\n)\n\ngrid_scores_60_dense_pred, grid_scores_90_dense_pred, grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n data_abs_xy=predictions, #res['pos_xy'],\n activations=dense_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n filename=fname_dense_pred,\n)\n\ngrid_scores_60_dense_truth, grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth, grid_scores_90_separation_dense_truth = utils.get_scores_and_plot(\n scorer=latest_epoch_scorer,\n data_abs_xy=coords, #res['pos_xy'],\n activations=dense_activations, #res['bottleneck'],\n directory='output_grid_scores', #FLAGS.saver_results_directory,\n 
filename=fname_dense_truth,\n)\n\n\nprint(grid_scores_60_truth, grid_scores_90_truth, grid_scores_60_separation_truth, grid_scores_90_separation_truth)\n\n# Saving to make grid score values easy to compare for different variations\nfname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix, args.n_samples)\nnp.savez(\n fname,\n grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred,\n grid_scores_60_separation_pred=grid_scores_60_separation_pred,\n grid_scores_90_separation_pred=grid_scores_90_separation_pred,\n grid_scores_60_truth=grid_scores_60_truth,\n grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=grid_scores_90_separation_dense_pred,\n grid_scores_60_dense_truth=grid_scores_60_dense_truth,\n grid_scores_90_dense_truth=grid_scores_90_dense_truth,\n grid_scores_60_separation_dense_truth=grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth,\n)\n",
"import matplotlib\nimport os\ndisplay = os.environ.get('DISPLAY')\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\nimport argparse\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom datasets import train_test_loaders, angular_train_test_loaders, tf_train_test_loaders, load_from_cache\nfrom models import SSPPathIntegrationModel\nfrom datetime import datetime\nfrom tensorboardX import SummaryWriter\nimport json\nfrom spatial_semantic_pointers.utils import get_heatmap_vectors, ssp_to_loc, ssp_to_loc_v\nfrom spatial_semantic_pointers.plots import plot_predictions, plot_predictions_v\nimport matplotlib.pyplot as plt\nfrom path_integration_utils import pc_to_loc_v, encoding_func_from_model, pc_gauss_encoding_func, ssp_encoding_func, hd_gauss_encoding_func, hex_trig_encoding_func\nfrom ssp_navigation.utils.encodings import get_encoding_function\nimport grid_scoring.scores as scores\nimport grid_scoring.utils as utils\nfrom path_integration_utils import encoding_func_from_model, pc_gauss_encoding_func\nparser = argparse.ArgumentParser(\n 'Compute grid scores for a path integration model')\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\nparser.add_argument('--spatial-encoding', type=str, default='ssp', choices=\n ['ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp',\n 'orth-proj-ssp', 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp',\n 'sub-toroid-ssp', 'var-sub-toroid-ssp', 'random', '2d', '2d-normalized',\n 'one-hot', 'hex-trig', 'trig', 'random-trig', 'random-rotated-trig',\n 'random-proj', 'legendre', 'learned', 'learned-normalized',\n 'frozen-learned', 'frozen-learned-normalized', 'pc-gauss', 'pc-dog',\n 'tile-coding'])\nparser.add_argument('--frozen-model', type=str, default='', help=\n 'model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help=\n 'constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help=\n 'number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help=\n 'number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help=\n 'minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help=\n 'maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help=\n 'phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, default=3, help=\n 'projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help=\n 'ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1,\n 2, 3], help=\n 'pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 
3: hex grid'\n )\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help=\n 'Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true', help=\n 'once the dataset has been generated, it will be saved to a file to be loaded faster'\n )\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\nparser.add_argument('--n-image-bins', type=int, default=20)\nparser.add_argument('--n-hd-cells', type=int, default=0, help=\n 'If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help=\n 'Use the sin and cos of the angular velocity if angular velocities are used'\n )\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\nparser.add_argument('--no-cache-load', action='store_true', help=\n 'do not load from cache')\nargs = parser.parse_args()\nssp_scaling = args.ssp_scaling\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\ndata = np.load(args.dataset)\nlimit_low = 0\nlimit_high = 2.2\nres = 128\nencoding_func, dim = get_encoding_function(args, limit_low=limit_low,\n limit_high=limit_high)\nxs = np.linspace(limit_low, limit_high, res)\nys = np.linspace(limit_low, limit_high, res)\nheatmap_vectors = np.zeros((len(xs), len(ys), dim))\nprint('Generating Heatmap Vectors')\nfor i, x in enumerate(xs):\n for j, y in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(x=x, y=y)\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\nprint('Heatmap Vector Generation Complete')\nn_samples = args.n_samples\nrollout_length = args.trajectory_length\nbatch_size = args.minibatch_size\nif args.n_hd_cells > 0:\n hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=\n 0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = SSPPathIntegrationModel(input_size=input_size, unroll_length=\n rollout_length, sp_dim=dim + args.n_hd_cells, dropout_p=args.\n dropout_p, use_lmu=args.use_lmu, order=args.lmu_order)\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(input_size=2, unroll_length=\n rollout_length, sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.\n use_lmu, order=args.lmu_order)\nmodel.load_state_dict(torch.load(args.model), strict=False)\nmodel.eval()\nencoding_specific = ''\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nif 
os.path.exists(cache_fname) and not args.no_cache_load:\n print('Generating Train and Test Loaders from Cache')\n trainloader, testloader = load_from_cache(cache_fname, batch_size=\n batch_size, n_samples=n_samples)\nelse:\n print('Generating Train and Test Loaders')\n if 'tf' in args.dataset:\n assert args.sin_cos_ang == 1\n trainloader, testloader = tf_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n elif args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n else:\n trainloader, testloader = train_test_loaders(data, n_train_samples=\n n_samples, n_test_samples=n_samples, rollout_length=\n rollout_length, batch_size=batch_size, encoding=args.\n spatial_encoding, encoding_func=encoding_func, encoding_dim=\n args.dim, train_split=args.train_split)\n if args.allow_cache:\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n np.savez(cache_fname, train_velocity_inputs=trainloader.dataset.\n velocity_inputs, train_ssp_inputs=trainloader.dataset.\n ssp_inputs, train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n test_ssp_inputs=testloader.dataset.ssp_inputs, test_ssp_outputs\n =testloader.dataset.ssp_outputs)\nprint('Train and Test Loaders Generation Complete')\nstarts = [0.2] * 10\nends = np.linspace(0.4, 1.0, num=10)\nmasks_parameters = zip(starts, ends.tolist())\nlatest_epoch_scorer = scores.GridScorer(nbins=args.n_image_bins,\n coords_range=((0, 2.2), (0, 2.2)), mask_parameters=masks_parameters)\nfname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nprint('Testing')\nwith torch.no_grad():\n for i, data in enumerate(testloader):\n velocity_inputs, ssp_inputs, ssp_outputs = data\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(\n velocity_inputs, ssp_inputs)\n predictions = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.linear_hidden_size))\n assert rollout_length == ssp_pred.shape[0]\n print('Computing predicted locations and true locations')\n for ri in range(rollout_length):\n pred = ssp_pred.detach().numpy()[ri, :, :args.dim]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(pred, heatmap_vectors, xs, ys)\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(coord, 
heatmap_vectors, xs, ys)\n lstm_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[\n 1], :] = dense_outputs.detach().numpy()[ri, :, :]\nprint(np.max(predictions))\nprint(np.min(predictions))\n(grid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred,\n grid_scores_90_separation_pred) = (utils.get_scores_and_plot(scorer=\n latest_epoch_scorer, data_abs_xy=predictions, activations=\n lstm_activations, directory='output_grid_scores', filename=fname_lstm_pred)\n )\n(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth) = (utils\n .get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=coords,\n activations=lstm_activations, directory='output_grid_scores', filename=\n fname_lstm_truth))\n(grid_scores_60_dense_pred, grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred\n ) = (utils.get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=\n predictions, activations=dense_activations, directory=\n 'output_grid_scores', filename=fname_dense_pred))\n(grid_scores_60_dense_truth, grid_scores_90_dense_truth,\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth) = (utils.get_scores_and_plot(\n scorer=latest_epoch_scorer, data_abs_xy=coords, activations=\n dense_activations, directory='output_grid_scores', filename=\n fname_dense_truth))\nprint(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth)\nfname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix,\n args.n_samples)\nnp.savez(fname, grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred, grid_scores_60_separation_pred\n =grid_scores_60_separation_pred, grid_scores_90_separation_pred=\n grid_scores_90_separation_pred, grid_scores_60_truth=\n grid_scores_60_truth, grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=\n grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=\n grid_scores_90_separation_dense_pred, grid_scores_60_dense_truth=\n grid_scores_60_dense_truth, grid_scores_90_dense_truth=\n grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth=\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth\n )\n",
"<import token>\ndisplay = os.environ.get('DISPLAY')\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\n<import token>\nparser = argparse.ArgumentParser(\n 'Compute grid scores for a path integration model')\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\nparser.add_argument('--spatial-encoding', type=str, default='ssp', choices=\n ['ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp',\n 'orth-proj-ssp', 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp',\n 'sub-toroid-ssp', 'var-sub-toroid-ssp', 'random', '2d', '2d-normalized',\n 'one-hot', 'hex-trig', 'trig', 'random-trig', 'random-rotated-trig',\n 'random-proj', 'legendre', 'learned', 'learned-normalized',\n 'frozen-learned', 'frozen-learned-normalized', 'pc-gauss', 'pc-dog',\n 'tile-coding'])\nparser.add_argument('--frozen-model', type=str, default='', help=\n 'model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help=\n 'constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help=\n 'number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help=\n 'number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help=\n 'minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help=\n 'maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help=\n 'phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, default=3, help=\n 'projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help=\n 'ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1,\n 2, 3], help=\n 'pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 
3: hex grid'\n )\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help=\n 'Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true', help=\n 'once the dataset has been generated, it will be saved to a file to be loaded faster'\n )\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\nparser.add_argument('--n-image-bins', type=int, default=20)\nparser.add_argument('--n-hd-cells', type=int, default=0, help=\n 'If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help=\n 'Use the sin and cos of the angular velocity if angular velocities are used'\n )\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\nparser.add_argument('--no-cache-load', action='store_true', help=\n 'do not load from cache')\nargs = parser.parse_args()\nssp_scaling = args.ssp_scaling\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\ndata = np.load(args.dataset)\nlimit_low = 0\nlimit_high = 2.2\nres = 128\nencoding_func, dim = get_encoding_function(args, limit_low=limit_low,\n limit_high=limit_high)\nxs = np.linspace(limit_low, limit_high, res)\nys = np.linspace(limit_low, limit_high, res)\nheatmap_vectors = np.zeros((len(xs), len(ys), dim))\nprint('Generating Heatmap Vectors')\nfor i, x in enumerate(xs):\n for j, y in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(x=x, y=y)\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\nprint('Heatmap Vector Generation Complete')\nn_samples = args.n_samples\nrollout_length = args.trajectory_length\nbatch_size = args.minibatch_size\nif args.n_hd_cells > 0:\n hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=\n 0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = SSPPathIntegrationModel(input_size=input_size, unroll_length=\n rollout_length, sp_dim=dim + args.n_hd_cells, dropout_p=args.\n dropout_p, use_lmu=args.use_lmu, order=args.lmu_order)\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(input_size=2, unroll_length=\n rollout_length, sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.\n use_lmu, order=args.lmu_order)\nmodel.load_state_dict(torch.load(args.model), strict=False)\nmodel.eval()\nencoding_specific = ''\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nif 
os.path.exists(cache_fname) and not args.no_cache_load:\n print('Generating Train and Test Loaders from Cache')\n trainloader, testloader = load_from_cache(cache_fname, batch_size=\n batch_size, n_samples=n_samples)\nelse:\n print('Generating Train and Test Loaders')\n if 'tf' in args.dataset:\n assert args.sin_cos_ang == 1\n trainloader, testloader = tf_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n elif args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n else:\n trainloader, testloader = train_test_loaders(data, n_train_samples=\n n_samples, n_test_samples=n_samples, rollout_length=\n rollout_length, batch_size=batch_size, encoding=args.\n spatial_encoding, encoding_func=encoding_func, encoding_dim=\n args.dim, train_split=args.train_split)\n if args.allow_cache:\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n np.savez(cache_fname, train_velocity_inputs=trainloader.dataset.\n velocity_inputs, train_ssp_inputs=trainloader.dataset.\n ssp_inputs, train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n test_ssp_inputs=testloader.dataset.ssp_inputs, test_ssp_outputs\n =testloader.dataset.ssp_outputs)\nprint('Train and Test Loaders Generation Complete')\nstarts = [0.2] * 10\nends = np.linspace(0.4, 1.0, num=10)\nmasks_parameters = zip(starts, ends.tolist())\nlatest_epoch_scorer = scores.GridScorer(nbins=args.n_image_bins,\n coords_range=((0, 2.2), (0, 2.2)), mask_parameters=masks_parameters)\nfname_lstm_pred = '{}_{}samples_lstm_pred.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_lstm_truth = '{}_{}samples_lstm_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_dense_pred = '{}_{}samples_dense_pred.pdf'.format(args.fname_prefix,\n args.n_samples)\nfname_dense_truth = '{}_{}samples_dense_truth.pdf'.format(args.fname_prefix,\n args.n_samples)\nprint('Testing')\nwith torch.no_grad():\n for i, data in enumerate(testloader):\n velocity_inputs, ssp_inputs, ssp_outputs = data\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(\n velocity_inputs, ssp_inputs)\n predictions = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.linear_hidden_size))\n assert rollout_length == ssp_pred.shape[0]\n print('Computing predicted locations and true locations')\n for ri in range(rollout_length):\n pred = ssp_pred.detach().numpy()[ri, :, :args.dim]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(pred, heatmap_vectors, xs, ys)\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(coord, 
heatmap_vectors, xs, ys)\n lstm_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[\n 1], :] = dense_outputs.detach().numpy()[ri, :, :]\nprint(np.max(predictions))\nprint(np.min(predictions))\n(grid_scores_60_pred, grid_scores_90_pred, grid_scores_60_separation_pred,\n grid_scores_90_separation_pred) = (utils.get_scores_and_plot(scorer=\n latest_epoch_scorer, data_abs_xy=predictions, activations=\n lstm_activations, directory='output_grid_scores', filename=fname_lstm_pred)\n )\n(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth) = (utils\n .get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=coords,\n activations=lstm_activations, directory='output_grid_scores', filename=\n fname_lstm_truth))\n(grid_scores_60_dense_pred, grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred, grid_scores_90_separation_dense_pred\n ) = (utils.get_scores_and_plot(scorer=latest_epoch_scorer, data_abs_xy=\n predictions, activations=dense_activations, directory=\n 'output_grid_scores', filename=fname_dense_pred))\n(grid_scores_60_dense_truth, grid_scores_90_dense_truth,\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth) = (utils.get_scores_and_plot(\n scorer=latest_epoch_scorer, data_abs_xy=coords, activations=\n dense_activations, directory='output_grid_scores', filename=\n fname_dense_truth))\nprint(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth)\nfname = 'output_grid_scores/{}_{}samples.npz'.format(args.fname_prefix,\n args.n_samples)\nnp.savez(fname, grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred, grid_scores_60_separation_pred\n =grid_scores_60_separation_pred, grid_scores_90_separation_pred=\n grid_scores_90_separation_pred, grid_scores_60_truth=\n grid_scores_60_truth, grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=\n grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=\n grid_scores_90_separation_dense_pred, grid_scores_60_dense_truth=\n grid_scores_60_dense_truth, grid_scores_90_dense_truth=\n grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth=\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth\n )\n",
"<import token>\n<assignment token>\nif display is None or 'localhost' in display:\n matplotlib.use('agg')\n<import token>\n<assignment token>\nparser.add_argument('--n-samples', type=int, default=5000)\nparser.add_argument('--use-localization', action='store_true')\nparser.add_argument('--dataset', type=str, default='')\nparser.add_argument('--model', type=str, default='')\nparser.add_argument('--fname-prefix', type=str, default='sac')\nparser.add_argument('--spatial-encoding', type=str, default='ssp', choices=\n ['ssp', 'hex-ssp', 'periodic-hex-ssp', 'grid-ssp', 'ind-ssp',\n 'orth-proj-ssp', 'rec-ssp', 'rec-hex-ssp', 'rec-ind-ssp',\n 'sub-toroid-ssp', 'var-sub-toroid-ssp', 'random', '2d', '2d-normalized',\n 'one-hot', 'hex-trig', 'trig', 'random-trig', 'random-rotated-trig',\n 'random-proj', 'legendre', 'learned', 'learned-normalized',\n 'frozen-learned', 'frozen-learned-normalized', 'pc-gauss', 'pc-dog',\n 'tile-coding'])\nparser.add_argument('--frozen-model', type=str, default='', help=\n 'model to use frozen encoding weights from')\nparser.add_argument('--pc-gauss-sigma', type=float, default=0.25)\nparser.add_argument('--pc-diff-sigma', type=float, default=0.5)\nparser.add_argument('--hex-freq-coef', type=float, default=2.5, help=\n 'constant to scale frequencies by')\nparser.add_argument('--n-tiles', type=int, default=8, help=\n 'number of layers for tile coding')\nparser.add_argument('--n-bins', type=int, default=8, help=\n 'number of bins for tile coding')\nparser.add_argument('--ssp-scaling', type=float, default=1.0)\nparser.add_argument('--grid-ssp-min', type=float, default=0.25, help=\n 'minimum plane wave scale')\nparser.add_argument('--grid-ssp-max', type=float, default=2.0, help=\n 'maximum plane wave scale')\nparser.add_argument('--phi', type=float, default=0.5, help=\n 'phi as a fraction of pi for orth-proj-ssp')\nparser.add_argument('--n-proj', type=int, default=3, help=\n 'projection dimension for sub toroids')\nparser.add_argument('--scale-ratio', type=float, default=0, help=\n 'ratio between sub toroid scales')\nparser.add_argument('--hilbert-points', type=int, default=1, choices=[0, 1,\n 2, 3], help=\n 'pc centers. 0: random uniform. 1: hilbert curve. 2: evenly spaced grid. 
3: hex grid'\n )\nparser.add_argument('--seed', type=int, default=13)\nparser.add_argument('--dropout-p', type=float, default=0.5)\nparser.add_argument('--dim', type=int, default=512)\nparser.add_argument('--train-split', type=float, default=0.8, help=\n 'Training fraction of the train/test split')\nparser.add_argument('--allow-cache', action='store_true', help=\n 'once the dataset has been generated, it will be saved to a file to be loaded faster'\n )\nparser.add_argument('--trajectory-length', type=int, default=100)\nparser.add_argument('--minibatch-size', type=int, default=10)\nparser.add_argument('--n-image-bins', type=int, default=20)\nparser.add_argument('--n-hd-cells', type=int, default=0, help=\n 'If non-zero, use linear and angular velocity as well as HD cell output')\nparser.add_argument('--sin-cos-ang', type=int, default=1, choices=[0, 1],\n help=\n 'Use the sin and cos of the angular velocity if angular velocities are used'\n )\nparser.add_argument('--use-lmu', action='store_true')\nparser.add_argument('--lmu-order', type=int, default=6)\nparser.add_argument('--no-cache-load', action='store_true', help=\n 'do not load from cache')\n<assignment token>\ntorch.manual_seed(args.seed)\nnp.random.seed(args.seed)\n<assignment token>\nprint('Generating Heatmap Vectors')\nfor i, x in enumerate(xs):\n for j, y in enumerate(ys):\n heatmap_vectors[i, j, :] = encoding_func(x=x, y=y)\n heatmap_vectors[i, j, :] /= np.linalg.norm(heatmap_vectors[i, j, :])\nprint('Heatmap Vector Generation Complete')\n<assignment token>\nif args.n_hd_cells > 0:\n hd_encoding_func = hd_gauss_encoding_func(dim=args.n_hd_cells, sigma=\n 0.25, use_softmax=False, rng=np.random.RandomState(args.seed))\n if args.sin_cos_ang:\n input_size = 3\n else:\n input_size = 2\n model = SSPPathIntegrationModel(input_size=input_size, unroll_length=\n rollout_length, sp_dim=dim + args.n_hd_cells, dropout_p=args.\n dropout_p, use_lmu=args.use_lmu, order=args.lmu_order)\nelse:\n hd_encoding_func = None\n model = SSPPathIntegrationModel(input_size=2, unroll_length=\n rollout_length, sp_dim=dim, dropout_p=args.dropout_p, use_lmu=args.\n use_lmu, order=args.lmu_order)\nmodel.load_state_dict(torch.load(args.model), strict=False)\nmodel.eval()\n<assignment token>\nif 'ssp' in args.spatial_encoding:\n encoding_specific = args.ssp_scaling\nelif args.spatial_encoding == 'frozen-learned':\n encoding_specific = args.frozen_model\nelif args.spatial_encoding == 'pc-gauss' or args.spatial_encoding == 'pc-gauss-softmax':\n encoding_specific = args.pc_gauss_sigma\nelif args.spatial_encoding == 'pc-dog':\n encoding_specific = '{}-{}'.format(args.pc_gauss_sigma, args.pc_diff_sigma)\nelif args.spatial_encoding == 'hex-trig':\n encoding_specific = args.hex_freq_coef\nif 'tf' in args.dataset:\n cache_fname = 'dataset_cache/tf_{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nelse:\n cache_fname = 'dataset_cache/{}_{}_{}_{}_{}_{}.npz'.format(args.\n spatial_encoding, args.dim, args.seed, args.n_samples, args.\n n_hd_cells, encoding_specific)\nif os.path.exists(cache_fname) and not args.no_cache_load:\n print('Generating Train and Test Loaders from Cache')\n trainloader, testloader = load_from_cache(cache_fname, batch_size=\n batch_size, n_samples=n_samples)\nelse:\n print('Generating Train and Test Loaders')\n if 'tf' in args.dataset:\n assert args.sin_cos_ang == 1\n trainloader, testloader = tf_train_test_loaders(data,\n n_train_samples=n_samples, 
n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n elif args.n_hd_cells > 0:\n trainloader, testloader = angular_train_test_loaders(data,\n n_train_samples=n_samples, n_test_samples=n_samples,\n rollout_length=rollout_length, batch_size=batch_size, encoding=\n args.spatial_encoding, encoding_func=encoding_func,\n encoding_dim=args.dim, train_split=args.train_split, hd_dim=\n args.n_hd_cells, hd_encoding_func=hd_encoding_func, sin_cos_ang\n =args.sin_cos_ang)\n else:\n trainloader, testloader = train_test_loaders(data, n_train_samples=\n n_samples, n_test_samples=n_samples, rollout_length=\n rollout_length, batch_size=batch_size, encoding=args.\n spatial_encoding, encoding_func=encoding_func, encoding_dim=\n args.dim, train_split=args.train_split)\n if args.allow_cache:\n if not os.path.exists('dataset_cache'):\n os.makedirs('dataset_cache')\n np.savez(cache_fname, train_velocity_inputs=trainloader.dataset.\n velocity_inputs, train_ssp_inputs=trainloader.dataset.\n ssp_inputs, train_ssp_outputs=trainloader.dataset.ssp_outputs,\n test_velocity_inputs=testloader.dataset.velocity_inputs,\n test_ssp_inputs=testloader.dataset.ssp_inputs, test_ssp_outputs\n =testloader.dataset.ssp_outputs)\nprint('Train and Test Loaders Generation Complete')\n<assignment token>\nprint('Testing')\nwith torch.no_grad():\n for i, data in enumerate(testloader):\n velocity_inputs, ssp_inputs, ssp_outputs = data\n ssp_pred, lstm_outputs, dense_outputs = model.forward_activations(\n velocity_inputs, ssp_inputs)\n predictions = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n coords = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1], 2))\n lstm_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.lstm_hidden_size))\n dense_activations = np.zeros((ssp_pred.shape[0] * ssp_pred.shape[1],\n model.linear_hidden_size))\n assert rollout_length == ssp_pred.shape[0]\n print('Computing predicted locations and true locations')\n for ri in range(rollout_length):\n pred = ssp_pred.detach().numpy()[ri, :, :args.dim]\n predictions[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(pred, heatmap_vectors, xs, ys)\n coord = ssp_outputs.detach().numpy()[:, ri, :args.dim]\n coords[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = ssp_to_loc_v(coord, heatmap_vectors, xs, ys)\n lstm_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[1], :\n ] = lstm_outputs.detach().numpy()[ri, :, :]\n dense_activations[ri * ssp_pred.shape[1]:(ri + 1) * ssp_pred.shape[\n 1], :] = dense_outputs.detach().numpy()[ri, :, :]\nprint(np.max(predictions))\nprint(np.min(predictions))\n<assignment token>\nprint(grid_scores_60_truth, grid_scores_90_truth,\n grid_scores_60_separation_truth, grid_scores_90_separation_truth)\n<assignment token>\nnp.savez(fname, grid_scores_60_pred=grid_scores_60_pred,\n grid_scores_90_pred=grid_scores_90_pred, grid_scores_60_separation_pred\n =grid_scores_60_separation_pred, grid_scores_90_separation_pred=\n grid_scores_90_separation_pred, grid_scores_60_truth=\n grid_scores_60_truth, grid_scores_90_truth=grid_scores_90_truth,\n grid_scores_60_separation_truth=grid_scores_60_separation_truth,\n grid_scores_90_separation_truth=grid_scores_90_separation_truth,\n grid_scores_60_dense_pred=grid_scores_60_dense_pred,\n 
grid_scores_90_dense_pred=grid_scores_90_dense_pred,\n grid_scores_60_separation_dense_pred=\n grid_scores_60_separation_dense_pred,\n grid_scores_90_separation_dense_pred=\n grid_scores_90_separation_dense_pred, grid_scores_60_dense_truth=\n grid_scores_60_dense_truth, grid_scores_90_dense_truth=\n grid_scores_90_dense_truth, grid_scores_60_separation_dense_truth=\n grid_scores_60_separation_dense_truth,\n grid_scores_90_separation_dense_truth=grid_scores_90_separation_dense_truth\n )\n",
"<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
839 |
8c6f890631e9696a7907975b5d0bb71d03b380da
|
import cv2
import numpy as np

# Split a scanned image into four quadrants and show each one in its own window.
img = cv2.imread('Scan1.jpg')
if img is None:
    raise FileNotFoundError("Could not read 'Scan1.jpg'")

img_height, img_width, dim = img.shape

cv2.imshow('image1', img[0:int(img_height / 2), 0:int(img_width / 2)])              # top-left
cv2.imshow('image2', img[int(img_height / 2):img_height, 0:int(img_width / 2)])     # bottom-left
cv2.imshow('image3', img[0:int(img_height / 2), int(img_width / 2):img_width])      # top-right
cv2.imshow('image4', img[int(img_height / 2):img_height, int(img_width / 2):img_width])  # bottom-right

cv2.waitKey(0)
cv2.destroyAllWindows()
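
# Added illustration (not part of the original sample): the same half-height /
# half-width slicing can also be used to persist each quadrant to disk.
# The output file names below are hypothetical.
quadrants = {
    'quadrant_top_left.jpg': img[0:int(img_height/2), 0:int(img_width/2)],
    'quadrant_bottom_left.jpg': img[int(img_height/2):img_height, 0:int(img_width/2)],
    'quadrant_top_right.jpg': img[0:int(img_height/2), int(img_width/2):img_width],
    'quadrant_bottom_right.jpg': img[int(img_height/2):img_height, int(img_width/2):img_width],
}
for name, part in quadrants.items():
    cv2.imwrite(name, part)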
|
[
"import cv2\r\nimport numpy as np\r\n\r\nimg = cv2.imread('Scan1.jpg')\r\n\r\nimg_height , img_width , dim = img.shape\r\n\r\ncv2.imshow('image1',img[0:int(img_height/2),0:int(img_width/2)])\r\ncv2.imshow('image2',img[int(img_height/2):img_height,0:int(img_width/2)])\r\ncv2.imshow('image3',img[0:int(img_height/2),int(img_width/2):img_width])\r\ncv2.imshow('image4',img[int(img_height/2):img_height,int(img_width/2):img_width])\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()\r\n",
"import cv2\nimport numpy as np\nimg = cv2.imread('Scan1.jpg')\nimg_height, img_width, dim = img.shape\ncv2.imshow('image1', img[0:int(img_height / 2), 0:int(img_width / 2)])\ncv2.imshow('image2', img[int(img_height / 2):img_height, 0:int(img_width / 2)])\ncv2.imshow('image3', img[0:int(img_height / 2), int(img_width / 2):img_width])\ncv2.imshow('image4', img[int(img_height / 2):img_height, int(img_width / 2)\n :img_width])\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n",
"<import token>\nimg = cv2.imread('Scan1.jpg')\nimg_height, img_width, dim = img.shape\ncv2.imshow('image1', img[0:int(img_height / 2), 0:int(img_width / 2)])\ncv2.imshow('image2', img[int(img_height / 2):img_height, 0:int(img_width / 2)])\ncv2.imshow('image3', img[0:int(img_height / 2), int(img_width / 2):img_width])\ncv2.imshow('image4', img[int(img_height / 2):img_height, int(img_width / 2)\n :img_width])\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n",
"<import token>\n<assignment token>\ncv2.imshow('image1', img[0:int(img_height / 2), 0:int(img_width / 2)])\ncv2.imshow('image2', img[int(img_height / 2):img_height, 0:int(img_width / 2)])\ncv2.imshow('image3', img[0:int(img_height / 2), int(img_width / 2):img_width])\ncv2.imshow('image4', img[int(img_height / 2):img_height, int(img_width / 2)\n :img_width])\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
840 |
c804391cc199a242d1b54ece8487ef74065a40ad
|
def max_product(n):
    """Return every integer in [0, n] whose digit product equals the maximum digit product in that range."""
    lst, lstnums, res, num = [], [], [], 1
    for i in range(0, n + 1):
        lstnums.append(i)
        # multiply the digits of i together
        for j in str(i):
            num *= int(j)
        lst.append(num)
        num = 1

    maxlst = max(lst)
    for i in range(len(lst)):
        if lst[i] == maxlst:
            res.append(lstnums[i])

    return res
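
# Usage sketch (an addition, not part of the original function): for n = 12 the
# digit products are 0, 1, ..., 9, 0, 1, 2, so the maximum product is 9 and only
# the number 9 attains it.
if __name__ == '__main__':
    print(max_product(12))  # expected output: [9]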
|
[
"\ndef max_product(n):\n lst, lstnums, res, num = [], [], [], 1\n for i in range(0, n+1):\n lstnums.append(i)\n for j in str(i):\n num *= int(j)\n lst.append(num)\n num = 1\n\n maxlst = max(lst)\n for i in range(len(lst)):\n if lst[i] == maxlst:\n res.append(lstnums[i])\n\n return res\n\n"
] | true |
841 |
c43b899234ffff09225153dcaf097591c7176430
|
from django.contrib import admin
# Register your models here.
from .models import Participant
class ParticipantAdmin(admin.ModelAdmin):
fieldsets = [
("Personal information", {'fields': ['email', 'name', 'institution', 'assistant']}),
("Asistance", {'fields': ['assistant', 'participant_hash']}),
("Contribution", {'fields': ['contribution', 'title', 'abstract', 'link']}),
]
list_display = ('email', 'name', 'assistant', 'contribution', 'title')
list_filter = ['assistant', 'contribution']
admin.site.register(Participant, ParticipantAdmin)
|
[
"from django.contrib import admin\n\n# Register your models here.\nfrom .models import Participant\n\n\nclass ParticipantAdmin(admin.ModelAdmin):\n fieldsets = [\n (\"Personal information\", {'fields': ['email', 'name', 'institution', 'assistant']}),\n (\"Asistance\", {'fields': ['assistant', 'participant_hash']}),\n (\"Contribution\", {'fields': ['contribution', 'title', 'abstract', 'link']}),\n ]\n list_display = ('email', 'name', 'assistant', 'contribution', 'title')\n list_filter = ['assistant', 'contribution']\n\nadmin.site.register(Participant, ParticipantAdmin)\n",
"from django.contrib import admin\nfrom .models import Participant\n\n\nclass ParticipantAdmin(admin.ModelAdmin):\n fieldsets = [('Personal information', {'fields': ['email', 'name',\n 'institution', 'assistant']}), ('Asistance', {'fields': [\n 'assistant', 'participant_hash']}), ('Contribution', {'fields': [\n 'contribution', 'title', 'abstract', 'link']})]\n list_display = 'email', 'name', 'assistant', 'contribution', 'title'\n list_filter = ['assistant', 'contribution']\n\n\nadmin.site.register(Participant, ParticipantAdmin)\n",
"<import token>\n\n\nclass ParticipantAdmin(admin.ModelAdmin):\n fieldsets = [('Personal information', {'fields': ['email', 'name',\n 'institution', 'assistant']}), ('Asistance', {'fields': [\n 'assistant', 'participant_hash']}), ('Contribution', {'fields': [\n 'contribution', 'title', 'abstract', 'link']})]\n list_display = 'email', 'name', 'assistant', 'contribution', 'title'\n list_filter = ['assistant', 'contribution']\n\n\nadmin.site.register(Participant, ParticipantAdmin)\n",
"<import token>\n\n\nclass ParticipantAdmin(admin.ModelAdmin):\n fieldsets = [('Personal information', {'fields': ['email', 'name',\n 'institution', 'assistant']}), ('Asistance', {'fields': [\n 'assistant', 'participant_hash']}), ('Contribution', {'fields': [\n 'contribution', 'title', 'abstract', 'link']})]\n list_display = 'email', 'name', 'assistant', 'contribution', 'title'\n list_filter = ['assistant', 'contribution']\n\n\n<code token>\n",
"<import token>\n\n\nclass ParticipantAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<code token>\n",
"<import token>\n<class token>\n<code token>\n"
] | false |
842 |
95422348c8db9753830cc0a7c8785c05b44886b1
|
from datetime import datetime as dt
YEAR = dt.today().year
BINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}
CHROME_DRIVER_PATH = r'C:\Users\pavithra\Downloads\chromedriver_win32\chromedriver.exe'
EXTRACTED_DIR = r'C:\Users\pavithra\Documents\fintuple-automation-projects\BseBhavCopy\dailybhavcopy\dailybhavcopy' \
r'\csv_files'
ZIP_DIR = r'C:\Users\pavithra\Documents\fintuple-automation-projects\BseBhavCopy\dailybhavcopy\dailybhavcopy\zip_files'
HEADLESS_OPTIONS = {'headless': '--headless',
'window_size': '--window-size=1920x1080'}
DOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,
'download.prompt_for_download': False}
def enable_download(driver, directory):
"""
:param driver: Selenium web driver
:param directory: Directory to store the file
This function allows the Selenium web driver to store the file in the given directory.
"""
driver.command_executor._commands["send_command"] = ("POST", '/session/$sessionId/chromium/send_command')
params = {'cmd': 'Page.setDownloadBehavior',
'params': {'behavior': 'allow',
'downloadPath': directory}}
driver.execute("send_command", params)
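

def build_driver():
    """Hypothetical helper (an addition, not part of the original module): one way the
    constants above and enable_download() might be wired together. Assumes a Selenium
    release whose Chrome() constructor still accepts the driver path positionally;
    newer releases expect a Service object instead."""
    from selenium import webdriver

    options = webdriver.ChromeOptions()
    options.binary_location = BINARY_LOCATION['binary_location']
    options.add_argument(HEADLESS_OPTIONS['headless'])
    options.add_argument(HEADLESS_OPTIONS['window_size'])
    options.add_experimental_option('prefs', DOWNLOAD_PREFERENCES)
    driver = webdriver.Chrome(CHROME_DRIVER_PATH, options=options)
    enable_download(driver, EXTRACTED_DIR)
    return driver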
|
[
"from datetime import datetime as dt\n\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = r'C:\\Users\\pavithra\\Downloads\\chromedriver_win32\\chromedriver.exe'\nEXTRACTED_DIR = r'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy' \\\n r'\\csv_files'\nZIP_DIR = r'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy\\zip_files'\nHEADLESS_OPTIONS = {'headless': '--headless',\n 'window_size': '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands[\"send_command\"] = (\"POST\", '/session/$sessionId/chromium/send_command')\n params = {'cmd': 'Page.setDownloadBehavior',\n 'params': {'behavior': 'allow',\n 'downloadPath': directory}}\n driver.execute(\"send_command\", params)\n",
"from datetime import datetime as dt\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location':\n 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = (\n 'C:\\\\Users\\\\pavithra\\\\Downloads\\\\chromedriver_win32\\\\chromedriver.exe')\nEXTRACTED_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\csv_files'\n )\nZIP_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\zip_files'\n )\nHEADLESS_OPTIONS = {'headless': '--headless', 'window_size':\n '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n",
"<import token>\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location':\n 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = (\n 'C:\\\\Users\\\\pavithra\\\\Downloads\\\\chromedriver_win32\\\\chromedriver.exe')\nEXTRACTED_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\csv_files'\n )\nZIP_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\zip_files'\n )\nHEADLESS_OPTIONS = {'headless': '--headless', 'window_size':\n '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n",
"<import token>\n<assignment token>\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n",
"<import token>\n<assignment token>\n<function token>\n"
] | false |
843 |
96cb2754db2740767dfb145078ed17969e85123d
|
from .parapred import main
main()
|
[
"from .parapred import main\nmain()\n",
"<import token>\nmain()\n",
"<import token>\n<code token>\n"
] | false |
844 |
77f94ecd205ae9f240f25d959a6d5cd9cf844d86
|
"""
The Snail v 2
"Buy the dips! ... then wait"
STRATEGY
1. Selects coins that are X% (percent_below) below their X day (LIMIT) maximum
2. ** NEW ** Finds movement (MOVEMENT) range over X Days
- if MOVEMENT* > TAKE_PROFIT coins pass to 3
3. Check coins are not already owned
4. Uses MACD to check if coins are currently on an uptrend
5. Adds coins that pass all above tests to Signal file for the Bot to buy (ordered by Potential Profit from High to Low)
* MOVEMENT
Looks at the fluctuation in price over LIMIT days and compares to your TAKE_PROFIT settings.
    i.e. if your TAKE_PROFIT is 3%, but the movement is only 1%, then you won't hit TP and will be left holding the coin
This can be turned off if you want.
STRATEGY SETTINGS
LIMIT = 4
INTERVAL = '1d'
profit_min = 15
profit_max = 100 # only required if you want to limit max profit
percent_below = 0.6 # change risk level: 0.7 = 70% below high_price, 0.5 = 50% below high_price
MOVEMENT = True #
OTHER SETTINGS
BVT or OLORIN Fork.
Set True / False for compatibility
WINDOWS (WINDOWS OS)
Set True / False for compatibility
DISCORD
send message to Discord - Set True / False
CONFIG.YML SETTINGS
CHANGE_IN_PRICE: 100 REQUIRED
Do NOT use pausebotmod as it will prevent the_snail from buying - The Snail buys the dips
Developed by scoobie
Thanks to
@vyacheslav for optimising the code with async and adding list sorting,
@Kevin.Butters for the meticulous testing and reporting,
@OlorinSledge for the coding advice and a great fork
DISCLAIMER
CHECK YOU HAVE ALL THE REQUIRED IMPORTS INSTALLED
Developed for OlorinSledge fork - no support for any others as I don't use them.
Troubleshooting and help - please use the #troubleshooting channel
Settings - the settings in this file are what I currently use, please don't DM me for the 'best' settings - for me, these are the best so far.
There's a lot of options to adjust the strategy, test them out and share your results in #bot-strategies so others can learn from them too
Hope the Snail makes you rich!
"""
import os
import re
import aiohttp
import asyncio
import time
import json
from datetime import datetime, timedelta
from kucoin.client import Client
from helpers.parameters import parse_args, load_config
import pandas as pd
import pandas_ta as ta
import ccxt
from tradingview_ta import TA_Handler, Interval, Exchange
import requests
# Load creds modules
from helpers.handle_creds import (
load_correct_creds, load_discord_creds
)
# Settings
args = parse_args()
DEFAULT_CONFIG_FILE = 'config.yml'
DEFAULT_CREDS_FILE = 'creds.yml'
config_file = args.config if args.config else DEFAULT_CONFIG_FILE
creds_file = args.creds if args.creds else DEFAULT_CREDS_FILE
parsed_creds = load_config(creds_file)
parsed_config = load_config(config_file)
# Load trading vars
PAIR_WITH = parsed_config['trading_options']['PAIR_WITH']
EX_PAIRS = parsed_config['trading_options']['FIATS']
TEST_MODE = parsed_config['script_options']['TEST_MODE']
TAKE_PROFIT = parsed_config['trading_options']['TAKE_PROFIT']
DISCORD_WEBHOOK = load_discord_creds(parsed_creds)
# Load creds for correct environment
access_key, secret_key, passphrase_key = load_correct_creds(parsed_creds)
client = Client(access_key, secret_key, passphrase_key)
# If True, an updated list of coins will be generated from the site - http://edgesforledges.com/watchlists/binance.
# If False, then the list you create in TICKERS_LIST = 'tickers.txt' will be used.
CREATE_TICKER_LIST = True
# When creating a ticker list from the source site:
# http://edgesforledges.com you can use the parameter (all or innovation-zone).
# ticker_type = 'innovation-zone'
ticker_type = 'all'
if CREATE_TICKER_LIST:
TICKERS_LIST = 'tickers_all_USDT.txt'
else:
TICKERS_LIST = 'tickers_all_USDT.txt'
# System Settings
BVT = False
OLORIN = True # if not using Olorin Sledge Fork set to False
if OLORIN:
signal_file_type = '.buy'
else:
signal_file_type = '.exs'
# if using Windows OS set to True, else set to False
WINDOWS = True
# send message to discord
DISCORD = True
# Strategy Settings
LIMIT = 4
INTERVAL = '1day'
profit_min = 15
profit_max = 100 # only required if you want to limit max profit
percent_below = 0.7 # change risk level: 0.7 = 70% below high_price, 0.5 = 50% below high_price
MOVEMENT = True
# Display Settings
all_info = False
class TextColors:
BUY = '\033[92m'
WARNING = '\033[93m'
SELL_LOSS = '\033[91m'
SELL_PROFIT = '\033[32m'
DIM = '\033[2m\033[35m'
DEFAULT = '\033[39m'
YELLOW = '\033[33m'
TURQUOISE = '\033[36m'
UNDERLINE = '\033[4m'
END = '\033[0m'
ITALICS = '\033[3m'
def msg_discord(msg):
message = msg + '\n\n'
mUrl = "https://discordapp.com/api/webhooks/"+DISCORD_WEBHOOK
data = {"content": message}
response = requests.post(mUrl, json=data)
def get_price(client_api):
initial_price = {}
tickers = [line.strip() for line in open(TICKERS_LIST)]
prices = client_api.get_ticker()
for coin in prices['ticker']:
for item in tickers:
if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH not in coin['symbol'] for item in EX_PAIRS):
initial_price[coin['symbol']] = {'symbol': coin['symbol'],
'price': coin['last'],
'time': datetime.now(),
'price_list': [],
'change_price': 0.0,
'cov': 0.0}
return initial_price
async def create_urls(ticker_list, interval) -> dict:
coins_urls = {}
if INTERVAL == '1day':
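		# NOTE: `st` is only assigned for the '1day' interval; with any other
		# INTERVAL the timestamp call below would raise a NameError.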
st = datetime.now() - timedelta(days=float(LIMIT))
et = datetime.now()
start_time = int(st.timestamp())
stop_time = int(et.timestamp())
for coin in ticker_list:
if type(coin) == dict:
if all(item + PAIR_WITH not in coin['symbol'] for item in EX_PAIRS):
coins_urls[coin['symbol']] = {'symbol': coin['symbol'],
                                              'url': f"https://api.kucoin.com/api/v1/market/candles?symbol="
f"{coin['symbol']}&type={interval}&startAt={start_time}&endAt={stop_time}"}
else:
coins_urls[coin] = {'symbol': coin,
'url': f"https://api.kucoin.com/api/v1/market/candles?symbol={coin}&type={interval}&startAt={start_time}&endAt={stop_time}"}
return coins_urls
async def get(session: aiohttp.ClientSession, url) -> dict:
data = {}
symbol = re.findall(r'=\w+', url)[0][1:]
try:
resp = await session.request('GET', url=url)
data['symbol'] = symbol
# data['last_price'] = await get_last_price(session=session, symbol=symbol)
data['data'] = await resp.json()
except Exception as e:
print(e)
return data
async def get_historical_data(ticker_list, interval):
urls = await create_urls(ticker_list=ticker_list, interval=interval)
if WINDOWS:
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
async with aiohttp.ClientSession() as session:
tasks = []
for url in urls:
link = urls[url]['url']
tasks.append(get(session=session, url=link))
response = await asyncio.gather(*tasks, return_exceptions=True)
return response
def get_prices_high_low(list_coins, interval):
if WINDOWS:
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
prices_low_high = {}
hist_data = asyncio.run(get_historical_data(ticker_list=list_coins, interval=interval))
for item in hist_data:
coin_symbol = item['symbol']
h_p = []
l_p = []
try:
for i in item['data']['data']:
close_time = i[0]
open_price = float(i[1])
close_price = float(i[2])
high_price = float(i[3])
low_price = float(i[4])
volume = float(i[5])
quote_volume = i[6]
h_p.append(high_price)
l_p.append(low_price)
except Exception as e:
print(f'Exception {e}')
continue
prices_low_high[coin_symbol] = {'symbol': coin_symbol, 'high_price': h_p, 'low_price': l_p, 'current_potential': 0.0}
return prices_low_high
def do_work():
while True:
init_price = get_price(client)
coins = get_prices_high_low(init_price, INTERVAL)
print(f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}')
if os.path.exists(f'signals/snail_scan{signal_file_type}'):
os.remove(f'signals/snail_scan{signal_file_type}')
current_potential_list = []
held_coins_list = {}
if TEST_MODE:
coin_path = 'test_coins_bought.json'
elif BVT:
coin_path = 'coins_bought.json'
else:
coin_path = 'live_coins_bought.json'
if os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:
with open(coin_path) as file:
held_coins_list = json.load(file)
for coin in coins:
if len(coins[coin]['high_price']) == LIMIT:
high_price = float(max(coins[coin]['high_price']))
low_price = float(min(coins[coin]['low_price']))
last_price = float(init_price[coin + PAIR_WITH]['price'])
# Calculation
range = high_price - low_price
potential = (low_price / high_price) * 100
buy_above = low_price * 1.00
buy_below = high_price - (range * percent_below) # percent below affects Risk
max_potential = potential * 0.98
min_potential = potential * 0.6
safe_potential = potential - 12
current_range = high_price - last_price
current_potential = ((high_price / last_price) * 100) - 100
coins[coin]['current_potential'] = current_potential
movement = (low_price / range)
# print(f'{coin} {potential:.2f}% {movement:.2f}%')
if MOVEMENT:
if profit_min < current_potential < profit_max and last_price < buy_below and movement >= TAKE_PROFIT and coin not in held_coins_list:
current_potential_list.append(coins[coin])
else:
if profit_min < current_potential < profit_max and last_price < buy_below and coin not in held_coins_list:
current_potential_list.append(coins[coin])
if current_potential_list:
# print(current_potential_list)
exchange = ccxt.binance()
macd_list = []
for i in current_potential_list:
coin = i['symbol'] + PAIR_WITH
current_potential = i['current_potential']
macd1 = exchange.fetch_ohlcv(coin, timeframe='1m', limit=36)
macd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)
macd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)
try:
macd1day = exchange.fetch_ohlcv(coin, timeframe='1d', limit=36)
except Exception as e:
print(f'{coin} Exception {e}')
continue
macdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m', limit=36)
df1 = pd.DataFrame(macd1, columns=['time', 'open', 'high', 'low', 'close', 'volume'])
df5 = pd.DataFrame(macd5, columns=['time', 'open', 'high', 'low', 'close', 'volume'])
df15 = pd.DataFrame(macd15, columns=['time', 'open', 'high', 'low', 'close', 'volume'])
df1day = pd.DataFrame(macd1day, columns=['time', 'open', 'high', 'low', 'close', 'volume'])
dfbtc = pd.DataFrame(macdbtc, columns=['time', 'open', 'high', 'low', 'close', 'volume'])
# Wait for 1 sec to prevent kucoin query limit
time.sleep(1)
try:
macd1 = df1.ta.macd(fast=12, slow=26)
macd5 = df5.ta.macd(fast=12, slow=26)
macd15 = df15.ta.macd(fast=12, slow=26)
macd1day = df1day.ta.macd(fast=12, slow=26)
macdbtc = dfbtc.ta.macd(fast=12, slow=26)
get_hist1 = macd1.iloc[35, 1]
get_hist5 = macd5.iloc[35, 1]
get_hist15 = macd15.iloc[35, 1]
get_hist1day = macd1day.iloc[35, 1]
get_histbtc = macdbtc.iloc[35, 1]
except Exception as e:
print(f'{coin} Exception {e}')
continue
if all_info:
if get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and get_hist1day >= 0 and get_histbtc >= 0:
print(f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}{TextColors.DEFAULT}')
else:
print(f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}')
if get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and get_hist1day >= 0 and get_histbtc >= 0:
# Add to coins for Snail to scan
print(f'{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\n')
macd_list.append(coins[coin])
# else:
# print(f'Do NOT buy {coin}')
if macd_list:
# print(macd_list)
sort_list = sorted(macd_list, key=lambda x: x[f'current_potential'], reverse=True)
for i in sort_list:
coin = i['symbol']
current_potential = i['current_potential']
last_price = float(init_price[coin + PAIR_WITH]['price'])
# print(f'list {coin} {last_price}')
high_price = float(max(coins[coin]['high_price']))
# print(f'list {coin} {high_price}')
low_price = float(min(coins[coin]['low_price']))
# print(f'list {coin} {low_price}')
range = high_price - low_price
potential = (low_price / high_price) * 100
buy_above = low_price * 1.00
buy_below = high_price - (range * percent_below)
current_range = high_price - last_price
if all_info:
print(f'\nPrice: ${last_price:.3f}\n'
f'High: ${high_price:.3f}\n'
# f'Plan: TP {TP}% TTP {TTP}%\n'
f'Day Max Range: ${range:.3f}\n'
f'Current Range: ${current_range:.3f} \n'
# f'Daily Range: ${range:.3f}\n'
# f'Current Range ${current_range:.3f} \n'
# f'Potential profit before safety: {potential:.0f}%\n'
# f'Buy above: ${buy_above:.3f}\n'
f'Buy Below: ${buy_below:.3f}\n'
f'Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}'
# f'Max Profit {max_potential:.2f}%\n'
# f'Min Profit {min_potential:.2f}%\n'
)
# print(f'Adding {TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} to buy list')
# add to signal
with open(f'signals/snail_scan{signal_file_type}', 'a+') as f:
f.write(str(coin + PAIR_WITH) + '\n')
# else:
# print(f'{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} may not be profitable at this time')
snail_coins = len(current_potential_list)
macd_coins = len(macd_list)
snail_discord = f'Snail found {snail_coins} coins and MACD approved {macd_coins}'
if DISCORD:
msg_discord(snail_discord)
print(f'{TextColors.TURQUOISE}Snail found {snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}')
time.sleep(180)
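

def snail_metrics(high_price, low_price, last_price):
	# Illustrative helper (an addition, not part of the bot): mirrors the per-coin
	# calculations inside do_work() so the entry filter can be inspected in isolation.
	# e.g. high 1.00, low 0.70, last 0.75 gives roughly 33% potential back to the high.
	price_range = high_price - low_price
	current_potential = ((high_price / last_price) * 100) - 100
	buy_below = high_price - (price_range * percent_below)
	movement = low_price / price_range
	return current_potential, buy_below, movement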
|
[
"\"\"\"\nThe Snail v 2\n\"Buy the dips! ... then wait\"\n\nSTRATEGY\n1. Selects coins that are X% (percent_below) below their X day (LIMIT) maximum\n2. ** NEW ** Finds movement (MOVEMENT) range over X Days\n - if MOVEMENT* > TAKE_PROFIT coins pass to 3\n3. Check coins are not already owned\n4. Uses MACD to check if coins are currently on an uptrend\n5. Adds coins that pass all above tests to Signal file for the Bot to buy (ordered by Potential Profit from High to Low)\n\n* MOVEMENT\n Looks at the fluctuation in price over LIMIT days and compares to your TAKE_PROFIT settings.\n i.e. if your TAKE_PROFIT is 3%, but the movement is only 1%, then you wont hit TP and will be left holding the coin\n This can be turned off if you want.\n\n\nSTRATEGY SETTINGS\nLIMIT = 4\nINTERVAL = '1d'\nprofit_min = 15\nprofit_max = 100 # only required if you want to limit max profit\npercent_below = 0.6 # change risk level: 0.7 = 70% below high_price, 0.5 = 50% below high_price\nMOVEMENT = True #\n\nOTHER SETTINGS\nBVT or OLORIN Fork.\nSet True / False for compatibility\n\nWINDOWS (WINDOWS OS)\nSet True / False for compatibility\n\nDISCORD\nsend message to Discord - Set True / False\n\n\nCONFIG.YML SETTINGS\nCHANGE_IN_PRICE: 100 REQUIRED\nDo NOT use pausebotmod as it will prevent the_snail from buying - The Snail buys the dips\n\nDeveloped by scoobie\nThanks to\n@vyacheslav for optimising the code with async and adding list sorting,\[email protected] for the meticulous testing and reporting,\n@OlorinSledge for the coding advice and a great fork\n\nDISCLAIMER\nCHECK YOU HAVE ALL THE REQUIRED IMPORTS INSTALLED\nDeveloped for OlorinSledge fork - no support for any others as I don't use them.\nTroubleshooting and help - please use the #troubleshooting channel\nSettings - the settings in this file are what I currently use, please don't DM me for the 'best' settings - for me, these are the best so far.\nThere's a lot of options to adjust the strategy, test them out and share your results in #bot-strategies so others can learn from them too\n\nHope the Snail makes you rich!\n\n\"\"\"\n\nimport os\nimport re\nimport aiohttp\nimport asyncio\nimport time\nimport json\nfrom datetime import datetime, timedelta\nfrom kucoin.client import Client\nfrom helpers.parameters import parse_args, load_config\nimport pandas as pd\nimport pandas_ta as ta\nimport ccxt\nfrom tradingview_ta import TA_Handler, Interval, Exchange\nimport requests\n\n# Load creds modules\nfrom helpers.handle_creds import (\n\tload_correct_creds, load_discord_creds\n)\n\n# Settings\nargs = parse_args()\nDEFAULT_CONFIG_FILE = 'config.yml'\nDEFAULT_CREDS_FILE = 'creds.yml'\n\n\nconfig_file = args.config if args.config else DEFAULT_CONFIG_FILE\ncreds_file = args.creds if args.creds else DEFAULT_CREDS_FILE\nparsed_creds = load_config(creds_file)\nparsed_config = load_config(config_file)\n\n# Load trading vars\nPAIR_WITH = parsed_config['trading_options']['PAIR_WITH']\nEX_PAIRS = parsed_config['trading_options']['FIATS']\nTEST_MODE = parsed_config['script_options']['TEST_MODE']\nTAKE_PROFIT = parsed_config['trading_options']['TAKE_PROFIT']\nDISCORD_WEBHOOK = load_discord_creds(parsed_creds)\n\n# Load creds for correct environment\naccess_key, secret_key, passphrase_key = load_correct_creds(parsed_creds)\nclient = Client(access_key, secret_key, passphrase_key)\n\n\n\n# If True, an updated list of coins will be generated from the site - http://edgesforledges.com/watchlists/binance.\n# If False, then the list you create in TICKERS_LIST = 'tickers.txt' will be 
used.\nCREATE_TICKER_LIST = True\n\n# When creating a ticker list from the source site:\n# http://edgesforledges.com you can use the parameter (all or innovation-zone).\n# ticker_type = 'innovation-zone'\nticker_type = 'all'\nif CREATE_TICKER_LIST:\n\tTICKERS_LIST = 'tickers_all_USDT.txt'\nelse:\n\tTICKERS_LIST = 'tickers_all_USDT.txt'\n\n# System Settings\nBVT = False\nOLORIN = True # if not using Olorin Sledge Fork set to False\nif OLORIN:\n\tsignal_file_type = '.buy'\nelse:\n\tsignal_file_type = '.exs'\n\n# if using Windows OS set to True, else set to False\nWINDOWS = True\n# send message to discord\nDISCORD = True\n\n# Strategy Settings\nLIMIT = 4\nINTERVAL = '1day'\nprofit_min = 15\nprofit_max = 100 # only required if you want to limit max profit\npercent_below = 0.7 # change risk level: 0.7 = 70% below high_price, 0.5 = 50% below high_price\nMOVEMENT = True\n\n# Display Setttings\nall_info = False\n\n\nclass TextColors:\n\tBUY = '\\033[92m'\n\tWARNING = '\\033[93m'\n\tSELL_LOSS = '\\033[91m'\n\tSELL_PROFIT = '\\033[32m'\n\tDIM = '\\033[2m\\033[35m'\n\tDEFAULT = '\\033[39m'\n\tYELLOW = '\\033[33m'\n\tTURQUOISE = '\\033[36m'\n\tUNDERLINE = '\\033[4m'\n\tEND = '\\033[0m'\n\tITALICS = '\\033[3m'\n\n\ndef msg_discord(msg):\n\n\tmessage = msg + '\\n\\n'\n\n\tmUrl = \"https://discordapp.com/api/webhooks/\"+DISCORD_WEBHOOK\n\tdata = {\"content\": message}\n\tresponse = requests.post(mUrl, json=data)\n\n\ndef get_price(client_api):\n\tinitial_price = {}\n\ttickers = [line.strip() for line in open(TICKERS_LIST)]\n\tprices = client_api.get_ticker()\n\n\tfor coin in prices['ticker']:\n\t\tfor item in tickers:\n\t\t\tif item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH not in coin['symbol'] for item in EX_PAIRS):\n\t\t\t\tinitial_price[coin['symbol']] = {'symbol': coin['symbol'],\n\t\t\t\t\t\t\t\t\t\t\t\t 'price': coin['last'],\n\t\t\t\t\t\t\t\t\t\t\t\t 'time': datetime.now(),\n\t\t\t\t\t\t\t\t\t\t\t\t 'price_list': [],\n\t\t\t\t\t\t\t\t\t\t\t\t 'change_price': 0.0,\n\t\t\t\t\t\t\t\t\t\t\t\t 'cov': 0.0}\n\treturn initial_price\n\n\nasync def create_urls(ticker_list, interval) -> dict:\n\tcoins_urls = {}\n\n\tif INTERVAL == '1day':\n \t\tst = datetime.now() - timedelta(days=float(LIMIT))\n\t\t\t\n\tet = datetime.now()\n\tstart_time = int(st.timestamp())\n\tstop_time = int(et.timestamp())\n\n\tfor coin in ticker_list:\n\t\tif type(coin) == dict:\n\t\t\tif all(item + PAIR_WITH not in coin['symbol'] for item in EX_PAIRS):\n\t\t\t\tcoins_urls[coin['symbol']] = {'symbol': coin['symbol'],\n\t\t\t\t\t\t\t\t\t\t\t 'url': f\"https://api.kucoin.com/api/v1/market/candles?symbol\"\n f\"{coin['symbol']}&type={interval}&startAt={start_time}&endAt={stop_time}\"}\n\t\telse:\n\t\t\tcoins_urls[coin] = {'symbol': coin,\n\t\t\t\t\t\t\t\t'url': f\"https://api.kucoin.com/api/v1/market/candles?symbol={coin}&type={interval}&startAt={start_time}&endAt={stop_time}\"}\n\n\treturn coins_urls\n\n\nasync def get(session: aiohttp.ClientSession, url) -> dict:\n\tdata = {}\n\tsymbol = re.findall(r'=\\w+', url)[0][1:]\n\ttry:\n\t\tresp = await session.request('GET', url=url)\n\t\tdata['symbol'] = symbol\n\t\t# data['last_price'] = await get_last_price(session=session, symbol=symbol)\n\t\tdata['data'] = await resp.json()\n\texcept Exception as e:\n\t\tprint(e)\n\treturn data\n\n\nasync def get_historical_data(ticker_list, interval):\n\turls = await create_urls(ticker_list=ticker_list, interval=interval)\n\tif WINDOWS:\n\t\tasyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n\tasync with 
aiohttp.ClientSession() as session:\n\t\ttasks = []\n\t\tfor url in urls:\n\t\t\tlink = urls[url]['url']\n\t\t\ttasks.append(get(session=session, url=link))\n\t\tresponse = await asyncio.gather(*tasks, return_exceptions=True)\n\t\treturn response\n\n\ndef get_prices_high_low(list_coins, interval):\n\tif WINDOWS:\n\t\tasyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n\t\n\tprices_low_high = {}\n\thist_data = asyncio.run(get_historical_data(ticker_list=list_coins, interval=interval))\n\tfor item in hist_data:\n\t\tcoin_symbol = item['symbol']\n\t\th_p = []\n\t\tl_p = []\n\t\ttry:\n\t\t\tfor i in item['data']['data']:\n\t\t\t\tclose_time = i[0]\n\t\t\t\topen_price = float(i[1])\n\t\t\t\tclose_price = float(i[2])\n\t\t\t\thigh_price = float(i[3])\n\t\t\t\tlow_price = float(i[4])\n\t\t\t\tvolume = float(i[5])\n\t\t\t\tquote_volume = i[6]\n\t\t\t\th_p.append(high_price)\n\t\t\t\tl_p.append(low_price)\n\t\texcept Exception as e:\n\t\t\t\t\tprint(f'Exception {e}')\n\t\t\t\t\tcontinue\n\n\t\tprices_low_high[coin_symbol] = {'symbol': coin_symbol, 'high_price': h_p, 'low_price': l_p, 'current_potential': 0.0}\n\n\treturn prices_low_high\n\n\ndef do_work(): \t\n\twhile True:\n \t\n\t\tinit_price = get_price(client)\n\t\tcoins = get_prices_high_low(init_price, INTERVAL)\n\t\tprint(f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}')\n\t\tif os.path.exists(f'signals/snail_scan{signal_file_type}'):\n\t\t\tos.remove(f'signals/snail_scan{signal_file_type}')\n\n\t\tcurrent_potential_list = []\n\t\theld_coins_list = {}\n\n\t\tif TEST_MODE:\n\t\t\tcoin_path = 'test_coins_bought.json'\n\t\telif BVT:\n\t\t\tcoin_path = 'coins_bought.json'\n\t\telse:\n\t\t\tcoin_path = 'live_coins_bought.json'\n\t\tif os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:\n\t\t\twith open(coin_path) as file:\n\t\t\t\theld_coins_list = json.load(file)\n\n\t\tfor coin in coins:\n\t\t\tif len(coins[coin]['high_price']) == LIMIT:\n\t\t\t\thigh_price = float(max(coins[coin]['high_price']))\n\t\t\t\tlow_price = float(min(coins[coin]['low_price']))\n\t\t\t\tlast_price = float(init_price[coin + PAIR_WITH]['price'])\n\n\t\t\t\t# Calculation\n\t\t\t\trange = high_price - low_price\n\t\t\t\tpotential = (low_price / high_price) * 100\n\t\t\t\tbuy_above = low_price * 1.00\n\t\t\t\tbuy_below = high_price - (range * percent_below) # percent below affects Risk\n\t\t\t\tmax_potential = potential * 0.98\n\t\t\t\tmin_potential = potential * 0.6\n\t\t\t\tsafe_potential = potential - 12\n\t\t\t\tcurrent_range = high_price - last_price\n\t\t\t\tcurrent_potential = ((high_price / last_price) * 100) - 100\n\t\t\t\tcoins[coin]['current_potential'] = current_potential\n\t\t\t\tmovement = (low_price / range)\n#\t\t\t\tprint(f'{coin} {potential:.2f}% {movement:.2f}%')\n\n\t\t\t\tif MOVEMENT:\n\t\t\t\t\tif profit_min < current_potential < profit_max and last_price < buy_below and movement >= TAKE_PROFIT and coin not in held_coins_list:\n\t\t\t\t\t\tcurrent_potential_list.append(coins[coin])\n\t\t\t\telse:\n\t\t\t\t\tif profit_min < current_potential < profit_max and last_price < buy_below and coin not in held_coins_list:\n\t\t\t\t\t\tcurrent_potential_list.append(coins[coin])\n\n\t\tif current_potential_list:\n\t\t\t# print(current_potential_list)\n\t\t\texchange = ccxt.binance()\n\t\t\tmacd_list = []\n\n\t\t\tfor i in current_potential_list:\n\t\t\t\tcoin = i['symbol'] + PAIR_WITH\n\t\t\t\tcurrent_potential = i['current_potential']\n\t\t\t\tmacd1 = exchange.fetch_ohlcv(coin, 
timeframe='1m', limit=36)\n\t\t\t\tmacd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)\n\t\t\t\tmacd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)\n\t\t\t\ttry:\n\t\t\t\t\tmacd1day = exchange.fetch_ohlcv(coin, timeframe='1d', limit=36)\n\t\t\t\texcept Exception as e:\n\t\t\t\t\tprint(f'{coin} Exception {e}')\n\t\t\t\t\tcontinue\n\t\t\t\tmacdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m', limit=36)\n\n\t\t\t\tdf1 = pd.DataFrame(macd1, columns=['time', 'open', 'high', 'low', 'close', 'volume'])\n\t\t\t\tdf5 = pd.DataFrame(macd5, columns=['time', 'open', 'high', 'low', 'close', 'volume'])\n\t\t\t\tdf15 = pd.DataFrame(macd15, columns=['time', 'open', 'high', 'low', 'close', 'volume'])\n\t\t\t\tdf1day = pd.DataFrame(macd1day, columns=['time', 'open', 'high', 'low', 'close', 'volume'])\n\t\t\t\tdfbtc = pd.DataFrame(macdbtc, columns=['time', 'open', 'high', 'low', 'close', 'volume'])\n\n\t\t\t\t# Wait for 1 sec to prevent kucoin query limit\n\t\t\t\ttime.sleep(1)\n\n\t\t\t\ttry:\n\t\t\t\t\tmacd1 = df1.ta.macd(fast=12, slow=26)\n\t\t\t\t\tmacd5 = df5.ta.macd(fast=12, slow=26)\n\t\t\t\t\tmacd15 = df15.ta.macd(fast=12, slow=26)\n\t\t\t\t\tmacd1day = df1day.ta.macd(fast=12, slow=26)\n\t\t\t\t\tmacdbtc = dfbtc.ta.macd(fast=12, slow=26)\n\n\t\t\t\t\tget_hist1 = macd1.iloc[35, 1]\n\t\t\t\t\tget_hist5 = macd5.iloc[35, 1]\n\t\t\t\t\tget_hist15 = macd15.iloc[35, 1]\t\t\t\t\n\t\t\t\t\tget_hist1day = macd1day.iloc[35, 1]\n\t\t\t\t\tget_histbtc = macdbtc.iloc[35, 1]\n\t\t\t\t\n\t\t\t\texcept Exception as e:\n\t\t\t\t\tprint(f'{coin} Exception {e}')\n\t\t\t\t\tcontinue\n\n\t\t\t\tif all_info:\n\t\t\t\t\tif get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and get_hist1day >= 0 and get_histbtc >= 0:\n\t\t\t\t\t\tprint(f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}{TextColors.DEFAULT}')\n\t\t\t\t\telse:\n\t\t\t\t\t\tprint(f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}')\n\n\t\t\t\tif get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and get_hist1day >= 0 and get_histbtc >= 0:\n\t\t\t\t\t# Add to coins for Snail to scan\n\t\t\t\t\tprint(f'{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\\n')\n\t\t\t\t\tmacd_list.append(coins[coin])\n\t\t\t\t# else:\n\t\t\t\t# print(f'Do NOT buy {coin}')\n\n\t\t\tif macd_list:\n\n\t\t\t\t# print(macd_list)\n\t\t\t\tsort_list = sorted(macd_list, key=lambda x: x[f'current_potential'], reverse=True)\n\t\t\t\tfor i in sort_list:\n\t\t\t\t\tcoin = i['symbol']\n\t\t\t\t\tcurrent_potential = i['current_potential']\n\t\t\t\t\tlast_price = float(init_price[coin + PAIR_WITH]['price'])\n\t\t\t\t\t# print(f'list {coin} {last_price}')\n\t\t\t\t\thigh_price = float(max(coins[coin]['high_price']))\n\t\t\t\t\t# print(f'list {coin} {high_price}')\n\t\t\t\t\tlow_price = float(min(coins[coin]['low_price']))\n\t\t\t\t\t# print(f'list {coin} {low_price}')\n\t\t\t\t\trange = high_price - low_price\n\t\t\t\t\tpotential = (low_price / high_price) * 100\n\t\t\t\t\tbuy_above = low_price * 1.00\n\t\t\t\t\tbuy_below = high_price - (range * percent_below)\n\t\t\t\t\tcurrent_range = high_price - last_price\n\n\t\t\t\t\tif all_info:\n\t\t\t\t\t\tprint(f'\\nPrice: ${last_price:.3f}\\n'\n\t\t\t\t\t\t\tf'High: ${high_price:.3f}\\n'\n\t\t\t\t\t\t\t# f'Plan: TP {TP}% TTP {TTP}%\\n'\n\t\t\t\t\t\t\tf'Day Max Range: 
${range:.3f}\\n'\n\t\t\t\t\t\t\tf'Current Range: ${current_range:.3f} \\n'\n\t\t\t\t\t\t\t# f'Daily Range: ${range:.3f}\\n'\n\t\t\t\t\t\t\t# f'Current Range ${current_range:.3f} \\n'\n\t\t\t\t\t\t\t# f'Potential profit before safety: {potential:.0f}%\\n'\n\t\t\t\t\t\t\t# f'Buy above: ${buy_above:.3f}\\n'\n\t\t\t\t\t\t\tf'Buy Below: ${buy_below:.3f}\\n'\n\t\t\t\t\t\t\tf'Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}'\n\t\t\t\t\t\t\t# f'Max Profit {max_potential:.2f}%\\n'\n\t\t\t\t\t\t\t# f'Min Profit {min_potential:.2f}%\\n'\n\t\t\t\t\t\t\t)\n\t\t\t\t\t# print(f'Adding {TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} to buy list')\n\n\t\t\t\t\t# add to signal\n\t\t\t\t\twith open(f'signals/snail_scan{signal_file_type}', 'a+') as f:\n\t\t\t\t\t\tf.write(str(coin + PAIR_WITH) + '\\n')\n\n\t\t\t# else:\n\t\t\t# print(f'{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} may not be profitable at this time')\n\t\t\tsnail_coins = len(current_potential_list)\n\t\t\tmacd_coins = len(macd_list)\n\t\t\tsnail_discord = f'Snail found {snail_coins} coins and MACD approved {macd_coins}'\n\t\t\tif DISCORD:\n\t\t\t\tmsg_discord(snail_discord)\n\t\t\tprint(f'{TextColors.TURQUOISE}Snail found {snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}')\n\t\t\ttime.sleep(180)\n",
"<docstring token>\nimport os\nimport re\nimport aiohttp\nimport asyncio\nimport time\nimport json\nfrom datetime import datetime, timedelta\nfrom kucoin.client import Client\nfrom helpers.parameters import parse_args, load_config\nimport pandas as pd\nimport pandas_ta as ta\nimport ccxt\nfrom tradingview_ta import TA_Handler, Interval, Exchange\nimport requests\nfrom helpers.handle_creds import load_correct_creds, load_discord_creds\nargs = parse_args()\nDEFAULT_CONFIG_FILE = 'config.yml'\nDEFAULT_CREDS_FILE = 'creds.yml'\nconfig_file = args.config if args.config else DEFAULT_CONFIG_FILE\ncreds_file = args.creds if args.creds else DEFAULT_CREDS_FILE\nparsed_creds = load_config(creds_file)\nparsed_config = load_config(config_file)\nPAIR_WITH = parsed_config['trading_options']['PAIR_WITH']\nEX_PAIRS = parsed_config['trading_options']['FIATS']\nTEST_MODE = parsed_config['script_options']['TEST_MODE']\nTAKE_PROFIT = parsed_config['trading_options']['TAKE_PROFIT']\nDISCORD_WEBHOOK = load_discord_creds(parsed_creds)\naccess_key, secret_key, passphrase_key = load_correct_creds(parsed_creds)\nclient = Client(access_key, secret_key, passphrase_key)\nCREATE_TICKER_LIST = True\nticker_type = 'all'\nif CREATE_TICKER_LIST:\n TICKERS_LIST = 'tickers_all_USDT.txt'\nelse:\n TICKERS_LIST = 'tickers_all_USDT.txt'\nBVT = False\nOLORIN = True\nif OLORIN:\n signal_file_type = '.buy'\nelse:\n signal_file_type = '.exs'\nWINDOWS = True\nDISCORD = True\nLIMIT = 4\nINTERVAL = '1day'\nprofit_min = 15\nprofit_max = 100\npercent_below = 0.7\nMOVEMENT = True\nall_info = False\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\ndef msg_discord(msg):\n message = msg + '\\n\\n'\n mUrl = 'https://discordapp.com/api/webhooks/' + DISCORD_WEBHOOK\n data = {'content': message}\n response = requests.post(mUrl, json=data)\n\n\ndef get_price(client_api):\n initial_price = {}\n tickers = [line.strip() for line in open(TICKERS_LIST)]\n prices = client_api.get_ticker()\n for coin in prices['ticker']:\n for item in tickers:\n if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH\n not in coin['symbol'] for item in EX_PAIRS):\n initial_price[coin['symbol']] = {'symbol': coin['symbol'],\n 'price': coin['last'], 'time': datetime.now(),\n 'price_list': [], 'change_price': 0.0, 'cov': 0.0}\n return initial_price\n\n\nasync def create_urls(ticker_list, interval) ->dict:\n coins_urls = {}\n if INTERVAL == '1day':\n st = datetime.now() - timedelta(days=float(LIMIT))\n et = datetime.now()\n start_time = int(st.timestamp())\n stop_time = int(et.timestamp())\n for coin in ticker_list:\n if type(coin) == dict:\n if all(item + PAIR_WITH not in coin['symbol'] for item in EX_PAIRS\n ):\n coins_urls[coin['symbol']] = {'symbol': coin['symbol'],\n 'url':\n f\"https://api.kucoin.com/api/v1/market/candles?symbol{coin['symbol']}&type={interval}&startAt={start_time}&endAt={stop_time}\"\n }\n else:\n coins_urls[coin] = {'symbol': coin, 'url':\n f'https://api.kucoin.com/api/v1/market/candles?symbol={coin}&type={interval}&startAt={start_time}&endAt={stop_time}'\n }\n return coins_urls\n\n\nasync def get(session: aiohttp.ClientSession, url) ->dict:\n data = {}\n symbol = re.findall('=\\\\w+', url)[0][1:]\n try:\n resp = await session.request('GET', url=url)\n data['symbol'] = symbol\n data['data'] = await 
resp.json()\n except Exception as e:\n print(e)\n return data\n\n\nasync def get_historical_data(ticker_list, interval):\n urls = await create_urls(ticker_list=ticker_list, interval=interval)\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n async with aiohttp.ClientSession() as session:\n tasks = []\n for url in urls:\n link = urls[url]['url']\n tasks.append(get(session=session, url=link))\n response = await asyncio.gather(*tasks, return_exceptions=True)\n return response\n\n\ndef get_prices_high_low(list_coins, interval):\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n prices_low_high = {}\n hist_data = asyncio.run(get_historical_data(ticker_list=list_coins,\n interval=interval))\n for item in hist_data:\n coin_symbol = item['symbol']\n h_p = []\n l_p = []\n try:\n for i in item['data']['data']:\n close_time = i[0]\n open_price = float(i[1])\n close_price = float(i[2])\n high_price = float(i[3])\n low_price = float(i[4])\n volume = float(i[5])\n quote_volume = i[6]\n h_p.append(high_price)\n l_p.append(low_price)\n except Exception as e:\n print(f'Exception {e}')\n continue\n prices_low_high[coin_symbol] = {'symbol': coin_symbol, 'high_price':\n h_p, 'low_price': l_p, 'current_potential': 0.0}\n return prices_low_high\n\n\ndef do_work():\n while True:\n init_price = get_price(client)\n coins = get_prices_high_low(init_price, INTERVAL)\n print(\n f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}'\n )\n if os.path.exists(f'signals/snail_scan{signal_file_type}'):\n os.remove(f'signals/snail_scan{signal_file_type}')\n current_potential_list = []\n held_coins_list = {}\n if TEST_MODE:\n coin_path = 'test_coins_bought.json'\n elif BVT:\n coin_path = 'coins_bought.json'\n else:\n coin_path = 'live_coins_bought.json'\n if os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:\n with open(coin_path) as file:\n held_coins_list = json.load(file)\n for coin in coins:\n if len(coins[coin]['high_price']) == LIMIT:\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n max_potential = potential * 0.98\n min_potential = potential * 0.6\n safe_potential = potential - 12\n current_range = high_price - last_price\n current_potential = high_price / last_price * 100 - 100\n coins[coin]['current_potential'] = current_potential\n movement = low_price / range\n if MOVEMENT:\n if (profit_min < current_potential < profit_max and \n last_price < buy_below and movement >= TAKE_PROFIT and\n coin not in held_coins_list):\n current_potential_list.append(coins[coin])\n elif profit_min < current_potential < profit_max and last_price < buy_below and coin not in held_coins_list:\n current_potential_list.append(coins[coin])\n if current_potential_list:\n exchange = ccxt.binance()\n macd_list = []\n for i in current_potential_list:\n coin = i['symbol'] + PAIR_WITH\n current_potential = i['current_potential']\n macd1 = exchange.fetch_ohlcv(coin, timeframe='1m', limit=36)\n macd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)\n macd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)\n try:\n macd1day = exchange.fetch_ohlcv(coin, timeframe='1d',\n limit=36)\n except Exception as e:\n print(f'{coin} Exception 
{e}')\n continue\n macdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m',\n limit=36)\n df1 = pd.DataFrame(macd1, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df5 = pd.DataFrame(macd5, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df15 = pd.DataFrame(macd15, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df1day = pd.DataFrame(macd1day, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n dfbtc = pd.DataFrame(macdbtc, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n time.sleep(1)\n try:\n macd1 = df1.ta.macd(fast=12, slow=26)\n macd5 = df5.ta.macd(fast=12, slow=26)\n macd15 = df15.ta.macd(fast=12, slow=26)\n macd1day = df1day.ta.macd(fast=12, slow=26)\n macdbtc = dfbtc.ta.macd(fast=12, slow=26)\n get_hist1 = macd1.iloc[35, 1]\n get_hist5 = macd5.iloc[35, 1]\n get_hist15 = macd15.iloc[35, 1]\n get_hist1day = macd1day.iloc[35, 1]\n get_histbtc = macdbtc.iloc[35, 1]\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n if all_info:\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >=\n 0 and get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}{TextColors.DEFAULT}'\n )\n else:\n print(\n f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}'\n )\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and\n get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f\"\"\"{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\n\"\"\"\n )\n macd_list.append(coins[coin])\n if macd_list:\n sort_list = sorted(macd_list, key=lambda x: x[\n f'current_potential'], reverse=True)\n for i in sort_list:\n coin = i['symbol']\n current_potential = i['current_potential']\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n current_range = high_price - last_price\n if all_info:\n print(\n f\"\"\"\nPrice: ${last_price:.3f}\nHigh: ${high_price:.3f}\nDay Max Range: ${range:.3f}\nCurrent Range: ${current_range:.3f} \nBuy Below: ${buy_below:.3f}\nPotential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\"\"\"\n )\n with open(f'signals/snail_scan{signal_file_type}', 'a+'\n ) as f:\n f.write(str(coin + PAIR_WITH) + '\\n')\n snail_coins = len(current_potential_list)\n macd_coins = len(macd_list)\n snail_discord = (\n f'Snail found {snail_coins} coins and MACD approved {macd_coins}'\n )\n if DISCORD:\n msg_discord(snail_discord)\n print(\n f'{TextColors.TURQUOISE}Snail found {snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}'\n )\n time.sleep(180)\n",
"<docstring token>\n<import token>\nargs = parse_args()\nDEFAULT_CONFIG_FILE = 'config.yml'\nDEFAULT_CREDS_FILE = 'creds.yml'\nconfig_file = args.config if args.config else DEFAULT_CONFIG_FILE\ncreds_file = args.creds if args.creds else DEFAULT_CREDS_FILE\nparsed_creds = load_config(creds_file)\nparsed_config = load_config(config_file)\nPAIR_WITH = parsed_config['trading_options']['PAIR_WITH']\nEX_PAIRS = parsed_config['trading_options']['FIATS']\nTEST_MODE = parsed_config['script_options']['TEST_MODE']\nTAKE_PROFIT = parsed_config['trading_options']['TAKE_PROFIT']\nDISCORD_WEBHOOK = load_discord_creds(parsed_creds)\naccess_key, secret_key, passphrase_key = load_correct_creds(parsed_creds)\nclient = Client(access_key, secret_key, passphrase_key)\nCREATE_TICKER_LIST = True\nticker_type = 'all'\nif CREATE_TICKER_LIST:\n TICKERS_LIST = 'tickers_all_USDT.txt'\nelse:\n TICKERS_LIST = 'tickers_all_USDT.txt'\nBVT = False\nOLORIN = True\nif OLORIN:\n signal_file_type = '.buy'\nelse:\n signal_file_type = '.exs'\nWINDOWS = True\nDISCORD = True\nLIMIT = 4\nINTERVAL = '1day'\nprofit_min = 15\nprofit_max = 100\npercent_below = 0.7\nMOVEMENT = True\nall_info = False\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\ndef msg_discord(msg):\n message = msg + '\\n\\n'\n mUrl = 'https://discordapp.com/api/webhooks/' + DISCORD_WEBHOOK\n data = {'content': message}\n response = requests.post(mUrl, json=data)\n\n\ndef get_price(client_api):\n initial_price = {}\n tickers = [line.strip() for line in open(TICKERS_LIST)]\n prices = client_api.get_ticker()\n for coin in prices['ticker']:\n for item in tickers:\n if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH\n not in coin['symbol'] for item in EX_PAIRS):\n initial_price[coin['symbol']] = {'symbol': coin['symbol'],\n 'price': coin['last'], 'time': datetime.now(),\n 'price_list': [], 'change_price': 0.0, 'cov': 0.0}\n return initial_price\n\n\nasync def create_urls(ticker_list, interval) ->dict:\n coins_urls = {}\n if INTERVAL == '1day':\n st = datetime.now() - timedelta(days=float(LIMIT))\n et = datetime.now()\n start_time = int(st.timestamp())\n stop_time = int(et.timestamp())\n for coin in ticker_list:\n if type(coin) == dict:\n if all(item + PAIR_WITH not in coin['symbol'] for item in EX_PAIRS\n ):\n coins_urls[coin['symbol']] = {'symbol': coin['symbol'],\n 'url':\n f\"https://api.kucoin.com/api/v1/market/candles?symbol{coin['symbol']}&type={interval}&startAt={start_time}&endAt={stop_time}\"\n }\n else:\n coins_urls[coin] = {'symbol': coin, 'url':\n f'https://api.kucoin.com/api/v1/market/candles?symbol={coin}&type={interval}&startAt={start_time}&endAt={stop_time}'\n }\n return coins_urls\n\n\nasync def get(session: aiohttp.ClientSession, url) ->dict:\n data = {}\n symbol = re.findall('=\\\\w+', url)[0][1:]\n try:\n resp = await session.request('GET', url=url)\n data['symbol'] = symbol\n data['data'] = await resp.json()\n except Exception as e:\n print(e)\n return data\n\n\nasync def get_historical_data(ticker_list, interval):\n urls = await create_urls(ticker_list=ticker_list, interval=interval)\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n async with aiohttp.ClientSession() as session:\n tasks = []\n for url in urls:\n link = urls[url]['url']\n 
tasks.append(get(session=session, url=link))\n response = await asyncio.gather(*tasks, return_exceptions=True)\n return response\n\n\ndef get_prices_high_low(list_coins, interval):\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n prices_low_high = {}\n hist_data = asyncio.run(get_historical_data(ticker_list=list_coins,\n interval=interval))\n for item in hist_data:\n coin_symbol = item['symbol']\n h_p = []\n l_p = []\n try:\n for i in item['data']['data']:\n close_time = i[0]\n open_price = float(i[1])\n close_price = float(i[2])\n high_price = float(i[3])\n low_price = float(i[4])\n volume = float(i[5])\n quote_volume = i[6]\n h_p.append(high_price)\n l_p.append(low_price)\n except Exception as e:\n print(f'Exception {e}')\n continue\n prices_low_high[coin_symbol] = {'symbol': coin_symbol, 'high_price':\n h_p, 'low_price': l_p, 'current_potential': 0.0}\n return prices_low_high\n\n\ndef do_work():\n while True:\n init_price = get_price(client)\n coins = get_prices_high_low(init_price, INTERVAL)\n print(\n f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}'\n )\n if os.path.exists(f'signals/snail_scan{signal_file_type}'):\n os.remove(f'signals/snail_scan{signal_file_type}')\n current_potential_list = []\n held_coins_list = {}\n if TEST_MODE:\n coin_path = 'test_coins_bought.json'\n elif BVT:\n coin_path = 'coins_bought.json'\n else:\n coin_path = 'live_coins_bought.json'\n if os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:\n with open(coin_path) as file:\n held_coins_list = json.load(file)\n for coin in coins:\n if len(coins[coin]['high_price']) == LIMIT:\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n max_potential = potential * 0.98\n min_potential = potential * 0.6\n safe_potential = potential - 12\n current_range = high_price - last_price\n current_potential = high_price / last_price * 100 - 100\n coins[coin]['current_potential'] = current_potential\n movement = low_price / range\n if MOVEMENT:\n if (profit_min < current_potential < profit_max and \n last_price < buy_below and movement >= TAKE_PROFIT and\n coin not in held_coins_list):\n current_potential_list.append(coins[coin])\n elif profit_min < current_potential < profit_max and last_price < buy_below and coin not in held_coins_list:\n current_potential_list.append(coins[coin])\n if current_potential_list:\n exchange = ccxt.binance()\n macd_list = []\n for i in current_potential_list:\n coin = i['symbol'] + PAIR_WITH\n current_potential = i['current_potential']\n macd1 = exchange.fetch_ohlcv(coin, timeframe='1m', limit=36)\n macd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)\n macd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)\n try:\n macd1day = exchange.fetch_ohlcv(coin, timeframe='1d',\n limit=36)\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n macdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m',\n limit=36)\n df1 = pd.DataFrame(macd1, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df5 = pd.DataFrame(macd5, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df15 = pd.DataFrame(macd15, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df1day = 
pd.DataFrame(macd1day, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n dfbtc = pd.DataFrame(macdbtc, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n time.sleep(1)\n try:\n macd1 = df1.ta.macd(fast=12, slow=26)\n macd5 = df5.ta.macd(fast=12, slow=26)\n macd15 = df15.ta.macd(fast=12, slow=26)\n macd1day = df1day.ta.macd(fast=12, slow=26)\n macdbtc = dfbtc.ta.macd(fast=12, slow=26)\n get_hist1 = macd1.iloc[35, 1]\n get_hist5 = macd5.iloc[35, 1]\n get_hist15 = macd15.iloc[35, 1]\n get_hist1day = macd1day.iloc[35, 1]\n get_histbtc = macdbtc.iloc[35, 1]\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n if all_info:\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >=\n 0 and get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}{TextColors.DEFAULT}'\n )\n else:\n print(\n f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}'\n )\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and\n get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f\"\"\"{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\n\"\"\"\n )\n macd_list.append(coins[coin])\n if macd_list:\n sort_list = sorted(macd_list, key=lambda x: x[\n f'current_potential'], reverse=True)\n for i in sort_list:\n coin = i['symbol']\n current_potential = i['current_potential']\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n current_range = high_price - last_price\n if all_info:\n print(\n f\"\"\"\nPrice: ${last_price:.3f}\nHigh: ${high_price:.3f}\nDay Max Range: ${range:.3f}\nCurrent Range: ${current_range:.3f} \nBuy Below: ${buy_below:.3f}\nPotential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\"\"\"\n )\n with open(f'signals/snail_scan{signal_file_type}', 'a+'\n ) as f:\n f.write(str(coin + PAIR_WITH) + '\\n')\n snail_coins = len(current_potential_list)\n macd_coins = len(macd_list)\n snail_discord = (\n f'Snail found {snail_coins} coins and MACD approved {macd_coins}'\n )\n if DISCORD:\n msg_discord(snail_discord)\n print(\n f'{TextColors.TURQUOISE}Snail found {snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}'\n )\n time.sleep(180)\n",
"<docstring token>\n<import token>\n<assignment token>\nif CREATE_TICKER_LIST:\n TICKERS_LIST = 'tickers_all_USDT.txt'\nelse:\n TICKERS_LIST = 'tickers_all_USDT.txt'\n<assignment token>\nif OLORIN:\n signal_file_type = '.buy'\nelse:\n signal_file_type = '.exs'\n<assignment token>\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\ndef msg_discord(msg):\n message = msg + '\\n\\n'\n mUrl = 'https://discordapp.com/api/webhooks/' + DISCORD_WEBHOOK\n data = {'content': message}\n response = requests.post(mUrl, json=data)\n\n\ndef get_price(client_api):\n initial_price = {}\n tickers = [line.strip() for line in open(TICKERS_LIST)]\n prices = client_api.get_ticker()\n for coin in prices['ticker']:\n for item in tickers:\n if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH\n not in coin['symbol'] for item in EX_PAIRS):\n initial_price[coin['symbol']] = {'symbol': coin['symbol'],\n 'price': coin['last'], 'time': datetime.now(),\n 'price_list': [], 'change_price': 0.0, 'cov': 0.0}\n return initial_price\n\n\nasync def create_urls(ticker_list, interval) ->dict:\n coins_urls = {}\n if INTERVAL == '1day':\n st = datetime.now() - timedelta(days=float(LIMIT))\n et = datetime.now()\n start_time = int(st.timestamp())\n stop_time = int(et.timestamp())\n for coin in ticker_list:\n if type(coin) == dict:\n if all(item + PAIR_WITH not in coin['symbol'] for item in EX_PAIRS\n ):\n coins_urls[coin['symbol']] = {'symbol': coin['symbol'],\n 'url':\n f\"https://api.kucoin.com/api/v1/market/candles?symbol{coin['symbol']}&type={interval}&startAt={start_time}&endAt={stop_time}\"\n }\n else:\n coins_urls[coin] = {'symbol': coin, 'url':\n f'https://api.kucoin.com/api/v1/market/candles?symbol={coin}&type={interval}&startAt={start_time}&endAt={stop_time}'\n }\n return coins_urls\n\n\nasync def get(session: aiohttp.ClientSession, url) ->dict:\n data = {}\n symbol = re.findall('=\\\\w+', url)[0][1:]\n try:\n resp = await session.request('GET', url=url)\n data['symbol'] = symbol\n data['data'] = await resp.json()\n except Exception as e:\n print(e)\n return data\n\n\nasync def get_historical_data(ticker_list, interval):\n urls = await create_urls(ticker_list=ticker_list, interval=interval)\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n async with aiohttp.ClientSession() as session:\n tasks = []\n for url in urls:\n link = urls[url]['url']\n tasks.append(get(session=session, url=link))\n response = await asyncio.gather(*tasks, return_exceptions=True)\n return response\n\n\ndef get_prices_high_low(list_coins, interval):\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n prices_low_high = {}\n hist_data = asyncio.run(get_historical_data(ticker_list=list_coins,\n interval=interval))\n for item in hist_data:\n coin_symbol = item['symbol']\n h_p = []\n l_p = []\n try:\n for i in item['data']['data']:\n close_time = i[0]\n open_price = float(i[1])\n close_price = float(i[2])\n high_price = float(i[3])\n low_price = float(i[4])\n volume = float(i[5])\n quote_volume = i[6]\n h_p.append(high_price)\n l_p.append(low_price)\n except Exception as e:\n print(f'Exception {e}')\n continue\n prices_low_high[coin_symbol] = {'symbol': coin_symbol, 'high_price':\n h_p, 'low_price': l_p, 'current_potential': 0.0}\n 
return prices_low_high\n\n\ndef do_work():\n while True:\n init_price = get_price(client)\n coins = get_prices_high_low(init_price, INTERVAL)\n print(\n f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}'\n )\n if os.path.exists(f'signals/snail_scan{signal_file_type}'):\n os.remove(f'signals/snail_scan{signal_file_type}')\n current_potential_list = []\n held_coins_list = {}\n if TEST_MODE:\n coin_path = 'test_coins_bought.json'\n elif BVT:\n coin_path = 'coins_bought.json'\n else:\n coin_path = 'live_coins_bought.json'\n if os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:\n with open(coin_path) as file:\n held_coins_list = json.load(file)\n for coin in coins:\n if len(coins[coin]['high_price']) == LIMIT:\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n max_potential = potential * 0.98\n min_potential = potential * 0.6\n safe_potential = potential - 12\n current_range = high_price - last_price\n current_potential = high_price / last_price * 100 - 100\n coins[coin]['current_potential'] = current_potential\n movement = low_price / range\n if MOVEMENT:\n if (profit_min < current_potential < profit_max and \n last_price < buy_below and movement >= TAKE_PROFIT and\n coin not in held_coins_list):\n current_potential_list.append(coins[coin])\n elif profit_min < current_potential < profit_max and last_price < buy_below and coin not in held_coins_list:\n current_potential_list.append(coins[coin])\n if current_potential_list:\n exchange = ccxt.binance()\n macd_list = []\n for i in current_potential_list:\n coin = i['symbol'] + PAIR_WITH\n current_potential = i['current_potential']\n macd1 = exchange.fetch_ohlcv(coin, timeframe='1m', limit=36)\n macd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)\n macd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)\n try:\n macd1day = exchange.fetch_ohlcv(coin, timeframe='1d',\n limit=36)\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n macdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m',\n limit=36)\n df1 = pd.DataFrame(macd1, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df5 = pd.DataFrame(macd5, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df15 = pd.DataFrame(macd15, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df1day = pd.DataFrame(macd1day, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n dfbtc = pd.DataFrame(macdbtc, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n time.sleep(1)\n try:\n macd1 = df1.ta.macd(fast=12, slow=26)\n macd5 = df5.ta.macd(fast=12, slow=26)\n macd15 = df15.ta.macd(fast=12, slow=26)\n macd1day = df1day.ta.macd(fast=12, slow=26)\n macdbtc = dfbtc.ta.macd(fast=12, slow=26)\n get_hist1 = macd1.iloc[35, 1]\n get_hist5 = macd5.iloc[35, 1]\n get_hist15 = macd15.iloc[35, 1]\n get_hist1day = macd1day.iloc[35, 1]\n get_histbtc = macdbtc.iloc[35, 1]\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n if all_info:\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >=\n 0 and get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} 
{get_histbtc}{TextColors.DEFAULT}'\n )\n else:\n print(\n f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}'\n )\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and\n get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f\"\"\"{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\n\"\"\"\n )\n macd_list.append(coins[coin])\n if macd_list:\n sort_list = sorted(macd_list, key=lambda x: x[\n f'current_potential'], reverse=True)\n for i in sort_list:\n coin = i['symbol']\n current_potential = i['current_potential']\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n current_range = high_price - last_price\n if all_info:\n print(\n f\"\"\"\nPrice: ${last_price:.3f}\nHigh: ${high_price:.3f}\nDay Max Range: ${range:.3f}\nCurrent Range: ${current_range:.3f} \nBuy Below: ${buy_below:.3f}\nPotential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\"\"\"\n )\n with open(f'signals/snail_scan{signal_file_type}', 'a+'\n ) as f:\n f.write(str(coin + PAIR_WITH) + '\\n')\n snail_coins = len(current_potential_list)\n macd_coins = len(macd_list)\n snail_discord = (\n f'Snail found {snail_coins} coins and MACD approved {macd_coins}'\n )\n if DISCORD:\n msg_discord(snail_discord)\n print(\n f'{TextColors.TURQUOISE}Snail found {snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}'\n )\n time.sleep(180)\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\ndef msg_discord(msg):\n message = msg + '\\n\\n'\n mUrl = 'https://discordapp.com/api/webhooks/' + DISCORD_WEBHOOK\n data = {'content': message}\n response = requests.post(mUrl, json=data)\n\n\ndef get_price(client_api):\n initial_price = {}\n tickers = [line.strip() for line in open(TICKERS_LIST)]\n prices = client_api.get_ticker()\n for coin in prices['ticker']:\n for item in tickers:\n if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH\n not in coin['symbol'] for item in EX_PAIRS):\n initial_price[coin['symbol']] = {'symbol': coin['symbol'],\n 'price': coin['last'], 'time': datetime.now(),\n 'price_list': [], 'change_price': 0.0, 'cov': 0.0}\n return initial_price\n\n\n<code token>\n\n\ndef get_prices_high_low(list_coins, interval):\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n prices_low_high = {}\n hist_data = asyncio.run(get_historical_data(ticker_list=list_coins,\n interval=interval))\n for item in hist_data:\n coin_symbol = item['symbol']\n h_p = []\n l_p = []\n try:\n for i in item['data']['data']:\n close_time = i[0]\n open_price = float(i[1])\n close_price = float(i[2])\n high_price = float(i[3])\n low_price = float(i[4])\n volume = float(i[5])\n quote_volume = i[6]\n h_p.append(high_price)\n l_p.append(low_price)\n except Exception as e:\n print(f'Exception {e}')\n continue\n prices_low_high[coin_symbol] = {'symbol': coin_symbol, 'high_price':\n h_p, 'low_price': l_p, 'current_potential': 0.0}\n return prices_low_high\n\n\ndef do_work():\n while True:\n init_price = get_price(client)\n coins = get_prices_high_low(init_price, INTERVAL)\n print(\n f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}'\n )\n if os.path.exists(f'signals/snail_scan{signal_file_type}'):\n os.remove(f'signals/snail_scan{signal_file_type}')\n current_potential_list = []\n held_coins_list = {}\n if TEST_MODE:\n coin_path = 'test_coins_bought.json'\n elif BVT:\n coin_path = 'coins_bought.json'\n else:\n coin_path = 'live_coins_bought.json'\n if os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:\n with open(coin_path) as file:\n held_coins_list = json.load(file)\n for coin in coins:\n if len(coins[coin]['high_price']) == LIMIT:\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n max_potential = potential * 0.98\n min_potential = potential * 0.6\n safe_potential = potential - 12\n current_range = high_price - last_price\n current_potential = high_price / last_price * 100 - 100\n coins[coin]['current_potential'] = current_potential\n movement = low_price / range\n if MOVEMENT:\n if (profit_min < current_potential < profit_max and \n last_price < buy_below and movement >= TAKE_PROFIT and\n coin not in held_coins_list):\n current_potential_list.append(coins[coin])\n elif profit_min < current_potential < profit_max and 
last_price < buy_below and coin not in held_coins_list:\n current_potential_list.append(coins[coin])\n if current_potential_list:\n exchange = ccxt.binance()\n macd_list = []\n for i in current_potential_list:\n coin = i['symbol'] + PAIR_WITH\n current_potential = i['current_potential']\n macd1 = exchange.fetch_ohlcv(coin, timeframe='1m', limit=36)\n macd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)\n macd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)\n try:\n macd1day = exchange.fetch_ohlcv(coin, timeframe='1d',\n limit=36)\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n macdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m',\n limit=36)\n df1 = pd.DataFrame(macd1, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df5 = pd.DataFrame(macd5, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df15 = pd.DataFrame(macd15, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df1day = pd.DataFrame(macd1day, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n dfbtc = pd.DataFrame(macdbtc, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n time.sleep(1)\n try:\n macd1 = df1.ta.macd(fast=12, slow=26)\n macd5 = df5.ta.macd(fast=12, slow=26)\n macd15 = df15.ta.macd(fast=12, slow=26)\n macd1day = df1day.ta.macd(fast=12, slow=26)\n macdbtc = dfbtc.ta.macd(fast=12, slow=26)\n get_hist1 = macd1.iloc[35, 1]\n get_hist5 = macd5.iloc[35, 1]\n get_hist15 = macd15.iloc[35, 1]\n get_hist1day = macd1day.iloc[35, 1]\n get_histbtc = macdbtc.iloc[35, 1]\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n if all_info:\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >=\n 0 and get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}{TextColors.DEFAULT}'\n )\n else:\n print(\n f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}'\n )\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and\n get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f\"\"\"{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\n\"\"\"\n )\n macd_list.append(coins[coin])\n if macd_list:\n sort_list = sorted(macd_list, key=lambda x: x[\n f'current_potential'], reverse=True)\n for i in sort_list:\n coin = i['symbol']\n current_potential = i['current_potential']\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n current_range = high_price - last_price\n if all_info:\n print(\n f\"\"\"\nPrice: ${last_price:.3f}\nHigh: ${high_price:.3f}\nDay Max Range: ${range:.3f}\nCurrent Range: ${current_range:.3f} \nBuy Below: ${buy_below:.3f}\nPotential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\"\"\"\n )\n with open(f'signals/snail_scan{signal_file_type}', 'a+'\n ) as f:\n f.write(str(coin + PAIR_WITH) + '\\n')\n snail_coins = len(current_potential_list)\n macd_coins = len(macd_list)\n snail_discord = (\n f'Snail found {snail_coins} coins and MACD approved {macd_coins}'\n )\n if DISCORD:\n msg_discord(snail_discord)\n print(\n f'{TextColors.TURQUOISE}Snail found 
{snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}'\n )\n time.sleep(180)\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\n<function token>\n\n\ndef get_price(client_api):\n initial_price = {}\n tickers = [line.strip() for line in open(TICKERS_LIST)]\n prices = client_api.get_ticker()\n for coin in prices['ticker']:\n for item in tickers:\n if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH\n not in coin['symbol'] for item in EX_PAIRS):\n initial_price[coin['symbol']] = {'symbol': coin['symbol'],\n 'price': coin['last'], 'time': datetime.now(),\n 'price_list': [], 'change_price': 0.0, 'cov': 0.0}\n return initial_price\n\n\n<code token>\n\n\ndef get_prices_high_low(list_coins, interval):\n if WINDOWS:\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\n prices_low_high = {}\n hist_data = asyncio.run(get_historical_data(ticker_list=list_coins,\n interval=interval))\n for item in hist_data:\n coin_symbol = item['symbol']\n h_p = []\n l_p = []\n try:\n for i in item['data']['data']:\n close_time = i[0]\n open_price = float(i[1])\n close_price = float(i[2])\n high_price = float(i[3])\n low_price = float(i[4])\n volume = float(i[5])\n quote_volume = i[6]\n h_p.append(high_price)\n l_p.append(low_price)\n except Exception as e:\n print(f'Exception {e}')\n continue\n prices_low_high[coin_symbol] = {'symbol': coin_symbol, 'high_price':\n h_p, 'low_price': l_p, 'current_potential': 0.0}\n return prices_low_high\n\n\ndef do_work():\n while True:\n init_price = get_price(client)\n coins = get_prices_high_low(init_price, INTERVAL)\n print(\n f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}'\n )\n if os.path.exists(f'signals/snail_scan{signal_file_type}'):\n os.remove(f'signals/snail_scan{signal_file_type}')\n current_potential_list = []\n held_coins_list = {}\n if TEST_MODE:\n coin_path = 'test_coins_bought.json'\n elif BVT:\n coin_path = 'coins_bought.json'\n else:\n coin_path = 'live_coins_bought.json'\n if os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:\n with open(coin_path) as file:\n held_coins_list = json.load(file)\n for coin in coins:\n if len(coins[coin]['high_price']) == LIMIT:\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n max_potential = potential * 0.98\n min_potential = potential * 0.6\n safe_potential = potential - 12\n current_range = high_price - last_price\n current_potential = high_price / last_price * 100 - 100\n coins[coin]['current_potential'] = current_potential\n movement = low_price / range\n if MOVEMENT:\n if (profit_min < current_potential < profit_max and \n last_price < buy_below and movement >= TAKE_PROFIT and\n coin not in held_coins_list):\n current_potential_list.append(coins[coin])\n elif profit_min < current_potential < profit_max and last_price < buy_below and coin not in held_coins_list:\n current_potential_list.append(coins[coin])\n if current_potential_list:\n exchange = ccxt.binance()\n macd_list = 
[]\n for i in current_potential_list:\n coin = i['symbol'] + PAIR_WITH\n current_potential = i['current_potential']\n macd1 = exchange.fetch_ohlcv(coin, timeframe='1m', limit=36)\n macd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)\n macd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)\n try:\n macd1day = exchange.fetch_ohlcv(coin, timeframe='1d',\n limit=36)\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n macdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m',\n limit=36)\n df1 = pd.DataFrame(macd1, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df5 = pd.DataFrame(macd5, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df15 = pd.DataFrame(macd15, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df1day = pd.DataFrame(macd1day, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n dfbtc = pd.DataFrame(macdbtc, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n time.sleep(1)\n try:\n macd1 = df1.ta.macd(fast=12, slow=26)\n macd5 = df5.ta.macd(fast=12, slow=26)\n macd15 = df15.ta.macd(fast=12, slow=26)\n macd1day = df1day.ta.macd(fast=12, slow=26)\n macdbtc = dfbtc.ta.macd(fast=12, slow=26)\n get_hist1 = macd1.iloc[35, 1]\n get_hist5 = macd5.iloc[35, 1]\n get_hist15 = macd15.iloc[35, 1]\n get_hist1day = macd1day.iloc[35, 1]\n get_histbtc = macdbtc.iloc[35, 1]\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n if all_info:\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >=\n 0 and get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}{TextColors.DEFAULT}'\n )\n else:\n print(\n f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}'\n )\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and\n get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f\"\"\"{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\n\"\"\"\n )\n macd_list.append(coins[coin])\n if macd_list:\n sort_list = sorted(macd_list, key=lambda x: x[\n f'current_potential'], reverse=True)\n for i in sort_list:\n coin = i['symbol']\n current_potential = i['current_potential']\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n current_range = high_price - last_price\n if all_info:\n print(\n f\"\"\"\nPrice: ${last_price:.3f}\nHigh: ${high_price:.3f}\nDay Max Range: ${range:.3f}\nCurrent Range: ${current_range:.3f} \nBuy Below: ${buy_below:.3f}\nPotential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\"\"\"\n )\n with open(f'signals/snail_scan{signal_file_type}', 'a+'\n ) as f:\n f.write(str(coin + PAIR_WITH) + '\\n')\n snail_coins = len(current_potential_list)\n macd_coins = len(macd_list)\n snail_discord = (\n f'Snail found {snail_coins} coins and MACD approved {macd_coins}'\n )\n if DISCORD:\n msg_discord(snail_discord)\n print(\n f'{TextColors.TURQUOISE}Snail found {snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}'\n )\n time.sleep(180)\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\n<function token>\n\n\ndef get_price(client_api):\n initial_price = {}\n tickers = [line.strip() for line in open(TICKERS_LIST)]\n prices = client_api.get_ticker()\n for coin in prices['ticker']:\n for item in tickers:\n if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH\n not in coin['symbol'] for item in EX_PAIRS):\n initial_price[coin['symbol']] = {'symbol': coin['symbol'],\n 'price': coin['last'], 'time': datetime.now(),\n 'price_list': [], 'change_price': 0.0, 'cov': 0.0}\n return initial_price\n\n\n<code token>\n<function token>\n\n\ndef do_work():\n while True:\n init_price = get_price(client)\n coins = get_prices_high_low(init_price, INTERVAL)\n print(\n f'{TextColors.TURQUOISE}The Snail is checking for potential profit and buy signals{TextColors.DEFAULT}'\n )\n if os.path.exists(f'signals/snail_scan{signal_file_type}'):\n os.remove(f'signals/snail_scan{signal_file_type}')\n current_potential_list = []\n held_coins_list = {}\n if TEST_MODE:\n coin_path = 'test_coins_bought.json'\n elif BVT:\n coin_path = 'coins_bought.json'\n else:\n coin_path = 'live_coins_bought.json'\n if os.path.isfile(coin_path) and os.stat(coin_path).st_size != 0:\n with open(coin_path) as file:\n held_coins_list = json.load(file)\n for coin in coins:\n if len(coins[coin]['high_price']) == LIMIT:\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n max_potential = potential * 0.98\n min_potential = potential * 0.6\n safe_potential = potential - 12\n current_range = high_price - last_price\n current_potential = high_price / last_price * 100 - 100\n coins[coin]['current_potential'] = current_potential\n movement = low_price / range\n if MOVEMENT:\n if (profit_min < current_potential < profit_max and \n last_price < buy_below and movement >= TAKE_PROFIT and\n coin not in held_coins_list):\n current_potential_list.append(coins[coin])\n elif profit_min < current_potential < profit_max and last_price < buy_below and coin not in held_coins_list:\n current_potential_list.append(coins[coin])\n if current_potential_list:\n exchange = ccxt.binance()\n macd_list = []\n for i in current_potential_list:\n coin = i['symbol'] + PAIR_WITH\n current_potential = i['current_potential']\n macd1 = exchange.fetch_ohlcv(coin, timeframe='1m', limit=36)\n macd5 = exchange.fetch_ohlcv(coin, timeframe='5m', limit=36)\n macd15 = exchange.fetch_ohlcv(coin, timeframe='15m', limit=36)\n try:\n macd1day = exchange.fetch_ohlcv(coin, timeframe='1d',\n limit=36)\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n macdbtc = exchange.fetch_ohlcv('BTCUSDT', timeframe='1m',\n limit=36)\n df1 = pd.DataFrame(macd1, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df5 = pd.DataFrame(macd5, columns=['time', 'open', 'high',\n 'low', 'close', 'volume'])\n df15 = pd.DataFrame(macd15, columns=['time', 'open', 'high',\n 'low', 'close', 
'volume'])\n df1day = pd.DataFrame(macd1day, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n dfbtc = pd.DataFrame(macdbtc, columns=['time', 'open',\n 'high', 'low', 'close', 'volume'])\n time.sleep(1)\n try:\n macd1 = df1.ta.macd(fast=12, slow=26)\n macd5 = df5.ta.macd(fast=12, slow=26)\n macd15 = df15.ta.macd(fast=12, slow=26)\n macd1day = df1day.ta.macd(fast=12, slow=26)\n macdbtc = dfbtc.ta.macd(fast=12, slow=26)\n get_hist1 = macd1.iloc[35, 1]\n get_hist5 = macd5.iloc[35, 1]\n get_hist15 = macd15.iloc[35, 1]\n get_hist1day = macd1day.iloc[35, 1]\n get_histbtc = macdbtc.iloc[35, 1]\n except Exception as e:\n print(f'{coin} Exception {e}')\n continue\n if all_info:\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >=\n 0 and get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f'MACD HIST {coin} {current_potential:2f}% {TextColors.SELL_PROFIT}{get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}{TextColors.DEFAULT}'\n )\n else:\n print(\n f'MACD HIST {coin} {current_potential:2f}% {get_hist1} {get_hist5} {get_hist15} {get_hist1day} {get_histbtc}'\n )\n if (get_hist1 >= 0 and get_hist5 >= 0 and get_hist15 >= 0 and\n get_hist1day >= 0 and get_histbtc >= 0):\n print(\n f\"\"\"{TextColors.TURQUOISE}{coin}{TextColors.DEFAULT} Potential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\n\"\"\"\n )\n macd_list.append(coins[coin])\n if macd_list:\n sort_list = sorted(macd_list, key=lambda x: x[\n f'current_potential'], reverse=True)\n for i in sort_list:\n coin = i['symbol']\n current_potential = i['current_potential']\n last_price = float(init_price[coin + PAIR_WITH]['price'])\n high_price = float(max(coins[coin]['high_price']))\n low_price = float(min(coins[coin]['low_price']))\n range = high_price - low_price\n potential = low_price / high_price * 100\n buy_above = low_price * 1.0\n buy_below = high_price - range * percent_below\n current_range = high_price - last_price\n if all_info:\n print(\n f\"\"\"\nPrice: ${last_price:.3f}\nHigh: ${high_price:.3f}\nDay Max Range: ${range:.3f}\nCurrent Range: ${current_range:.3f} \nBuy Below: ${buy_below:.3f}\nPotential profit: {TextColors.TURQUOISE}{current_potential:.0f}%{TextColors.DEFAULT}\"\"\"\n )\n with open(f'signals/snail_scan{signal_file_type}', 'a+'\n ) as f:\n f.write(str(coin + PAIR_WITH) + '\\n')\n snail_coins = len(current_potential_list)\n macd_coins = len(macd_list)\n snail_discord = (\n f'Snail found {snail_coins} coins and MACD approved {macd_coins}'\n )\n if DISCORD:\n msg_discord(snail_discord)\n print(\n f'{TextColors.TURQUOISE}Snail found {snail_coins} coins and MACD approved {macd_coins} coins. L: {LIMIT}days Min: {profit_min}% Risk: {percent_below * 100}% {TextColors.DEFAULT}'\n )\n time.sleep(180)\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\n<function token>\n\n\ndef get_price(client_api):\n initial_price = {}\n tickers = [line.strip() for line in open(TICKERS_LIST)]\n prices = client_api.get_ticker()\n for coin in prices['ticker']:\n for item in tickers:\n if item + PAIR_WITH == coin['symbol'] and all(item + PAIR_WITH\n not in coin['symbol'] for item in EX_PAIRS):\n initial_price[coin['symbol']] = {'symbol': coin['symbol'],\n 'price': coin['last'], 'time': datetime.now(),\n 'price_list': [], 'change_price': 0.0, 'cov': 0.0}\n return initial_price\n\n\n<code token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass TextColors:\n BUY = '\\x1b[92m'\n WARNING = '\\x1b[93m'\n SELL_LOSS = '\\x1b[91m'\n SELL_PROFIT = '\\x1b[32m'\n DIM = '\\x1b[2m\\x1b[35m'\n DEFAULT = '\\x1b[39m'\n YELLOW = '\\x1b[33m'\n TURQUOISE = '\\x1b[36m'\n UNDERLINE = '\\x1b[4m'\n END = '\\x1b[0m'\n ITALICS = '\\x1b[3m'\n\n\n<function token>\n<function token>\n<code token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass TextColors:\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<function token>\n<function token>\n<code token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<function token>\n<function token>\n<code token>\n<function token>\n<function token>\n"
] | false |
845 |
ea07cb640e76ced8be92b55ee14e1d3058e073c9
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .variational_legacy import *
|
[
"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom .variational_legacy import *\n",
"from .variational_legacy import *\n",
"<import token>\n"
] | false |
846 |
351b2c2a18473e6ac541a96165c69c836ea101de
|
#
# @lc app=leetcode.cn id=2006 lang=python3
#
# [2006] Count Number of Pairs With Absolute Difference K
#
# @lc code=start
from typing import List


class Solution:
def countKDifference(self, nums: List[int], k: int) -> int:
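        # Brute force: check every pair (i, j) with i < j, O(n^2) time.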
        def abs(x, y):
            if x - y >= 0:
                return x - y
            else:
                return y - x
ret = 0
for i in range(len(nums)):
            for j in range(i + 1, len(nums)):
                if abs(nums[i], nums[j]) == k:
                    ret += 1
return ret
# @lc code=end
|
[
"#\n# @lc app=leetcode.cn id=2006 lang=python3\n#\n# [2006] 差的绝对值为 K 的数对数目\n#\n\n# @lc code=start\nclass Solution:\n def countKDifference(self, nums: List[int], k: int) -> int:\n def abs(x,y):\n if(x-y>=0):\n return x-y \n else:\n return y-x \n ret = 0\n for i in range(len(nums)):\n for j in range(i,len(nums)):\n if(abs(nums[i],nums[j])==k):\n ret += 1 \n return ret \n# @lc code=end\n\n",
"class Solution:\n\n def countKDifference(self, nums: List[int], k: int) ->int:\n\n def abs(x, y):\n if x - y >= 0:\n return x - y\n else:\n return y - x\n ret = 0\n for i in range(len(nums)):\n for j in range(i, len(nums)):\n if abs(nums[i], nums[j]) == k:\n ret += 1\n return ret\n",
"class Solution:\n <function token>\n",
"<class token>\n"
] | false |
847 |
0fbf8efd39f583581c46fcd3f84c65a7787145cd
|
import tensorflow as tf
def build_shared_network(x, add_summaries=False):
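    # Shared trunk (two conv layers + a dense layer) reused by both the
    # policy and value estimators defined below.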
conv1 = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name="conv1")
conv2 = tf.layers.conv2d(conv1, 32, 4, 2, activation=tf.nn.relu, name="conv2")
fc1 = tf.layers.dense(tf.layers.flatten(conv2), 256, name="fc1")
if add_summaries:
tf.contrib.layers.summarize_activation(conv1)
tf.contrib.layers.summarize_activation(conv2)
tf.contrib.layers.summarize_activation(fc1)
return fc1
class PolicyEstimator():
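    # Actor head: maps shared features to a softmax distribution over actions.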
def __init__(self, num_ouptuts, reuse=False, trainable=True):
self.num_outputs = num_ouptuts
self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name="X")
self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name="Y")
self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name="actions")
x = tf.to_float(self.states) / 255.0
batch_size = tf.shape(self.states)[0]
with tf.variable_scope("shared", reuse=reuse):
fc1 = build_shared_network(x, add_summaries=(not reuse))
with tf.variable_scope("policy_net"):
self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)
self.probs = tf.nn.softmax(self.logits) + 1e-8
self.predictions = {"logits": self.logits, "probs": self.probs}
self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), 1, name="entropy")
self.entropy_mean = tf.reduce_mean(self.entropy, name="entropy_mean")
            # Flatten probs row-major and index it like a 1-D list: each row's
            # offset (batch index * num_actions) plus the action id (not one-hot)
            # locates that action's probability; tf.gather then pulls out only
            # the probability of the chosen action.
gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1] + self.actions
self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1]), gather_indices)
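            # Illustrative example (hypothetical sizes): with batch_size=2 and
            # 3 actions, actions=[2, 0] gives gather_indices = [0*3+2, 1*3+0] = [2, 3],
            # i.e. probs[0, 2] and probs[1, 0] from the flattened probs.
            # The loss below is a REINFORCE-style policy gradient:
            # -(log prob of taken action * target/advantage + 0.01 * entropy bonus).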
self.losses = - (tf.log(self.picked_action_probs) * self.targets + 0.01*self.entropy)
self.loss = tf.reduce_sum(self.losses, name="loss")
tf.summary.scalar(self.loss.op.name, self.loss)
tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)
tf.summary.histogram(self.entropy.op.name, self.entropy)
if trainable:
self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)
self.grads_and_vars = self.optimizer.compute_gradients(self.loss)
                # Drop entries whose gradient is None so they don't break training.
self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]
                # This train_op is not actually used; the worker calls apply_gradients
                # itself, so it could probably be removed.
self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())
var_scope_name = tf.get_variable_scope().name
summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)
summaries = [s for s in summary_ops if "policy_net" in s.name or "shared" in s.name]
summaries = [s for s in summary_ops if var_scope_name in s.name]
self.summaries = tf.summary.merge(summaries)
class ValueEstimator():
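    # Critic head: estimates the state value V(s) as a single scalar.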
def __init__(self, reuse=False, trainable=True):
self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name="X")
self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name="Y")
x = tf.to_float(self.states) / 255.0
with tf.variable_scope("shared", reuse=reuse):
fc1 = build_shared_network(x, add_summaries=(not reuse))
with tf.variable_scope("value_net"):
self.logits = tf.layers.dense(fc1, 1, activation=None)
            # squeeze drops size-1 dimensions, e.g. shape [1, 2, 3] -> [2, 3].
self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name="logits")
self.losses = tf.squared_difference(self.logits, self.targets)
self.loss = tf.reduce_sum(self.losses, name="loss")
self.predictions = { "logits": self.logits }
prefix = tf.get_variable_scope().name
tf.summary.scalar(self.loss.name, self.loss)
tf.summary.scalar("{}/max_value".format(prefix), tf.reduce_max(self.logits))
tf.summary.scalar("{}/min_value".format(prefix), tf.reduce_min(self.logits))
tf.summary.scalar("{}/mean_value".format(prefix), tf.reduce_mean(self.logits))
tf.summary.scalar("{}/reward_max".format(prefix), tf.reduce_max(self.targets))
tf.summary.scalar("{}/reward_min".format(prefix), tf.reduce_min(self.targets))
tf.summary.scalar("{}/reward_mean".format(prefix), tf.reduce_mean(self.targets))
tf.summary.histogram("{}/reward_targets".format(prefix), self.targets)
tf.summary.histogram("{}/values".format(prefix), self.logits)
if trainable:
self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)
self.grads_and_vars = self.optimizer.compute_gradients(self.loss)
self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]
self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())
var_scope_name = tf.get_variable_scope().name
summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)
summaries = [s for s in summary_ops if "policy_net" in s.name or "shared" in s.name]
summaries = [s for s in summary_ops if var_scope_name in s.name]
self.summaries = tf.summary.merge(summaries)
|
[
"import tensorflow as tf\n\ndef build_shared_network(x, add_summaries=False):\n conv1 = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name=\"conv1\")\n conv2 = tf.layers.conv2d(conv1, 32, 4, 2, activation=tf.nn.relu, name=\"conv2\")\n\n fc1 = tf.layers.dense(tf.layers.flatten(conv2), 256, name=\"fc1\")\n\n if add_summaries:\n tf.contrib.layers.summarize_activation(conv1)\n tf.contrib.layers.summarize_activation(conv2)\n tf.contrib.layers.summarize_activation(fc1)\n\n return fc1\n\nclass PolicyEstimator():\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name=\"X\")\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name=\"Y\")\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\"actions\")\n\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n\n with tf.variable_scope(\"shared\", reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=(not reuse))\n\n with tf.variable_scope(\"policy_net\"):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-8\n\n self.predictions = {\"logits\": self.logits, \"probs\": self.probs}\n\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), 1, name=\"entropy\")\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\"entropy_mean\")\n\n # 배열을 리스트처럼 만듬 => 각 데이터의 시작 부분(offset) + action값(onehot 아님) = action의 위치\n # 그 후 tf.gather을 이용해 원하는 action에 해당하는 확률값만 뽑아냄\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1]), gather_indices)\n\n self.losses = - (tf.log(self.picked_action_probs) * self.targets + 0.01*self.entropy)\n self.loss = tf.reduce_sum(self.losses, name=\"loss\")\n\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)\n self.grads_and_vars = self.optimizer.compute_gradients(self.loss)\n # grad가 None인 경우 학습이 망가지는 것을 막기 위해서 이렇게 만든 듯 하다.\n self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]\n # 여기 train_op 정작 쓰진 않음. worker에서 apply_gradient를 함. 지워도 될 듯\n self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())\n\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if \"policy_net\" in s.name or \"shared\" in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n\nclass ValueEstimator():\n def __init__(self, reuse=False, trainable=True):\n\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name=\"X\")\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name=\"Y\")\n\n x = tf.to_float(self.states) / 255.0\n\n with tf.variable_scope(\"shared\", reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=(not reuse))\n\n with tf.variable_scope(\"value_net\"):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n # squeeze는 1인 차원(행렬)을 날림. 
=> [1, 2, 3] squeeze => [2, 3]\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\"logits\")\n\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name=\"loss\")\n\n self.predictions = { \"logits\": self.logits }\n\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar(\"{}/max_value\".format(prefix), tf.reduce_max(self.logits))\n tf.summary.scalar(\"{}/min_value\".format(prefix), tf.reduce_min(self.logits))\n tf.summary.scalar(\"{}/mean_value\".format(prefix), tf.reduce_mean(self.logits))\n tf.summary.scalar(\"{}/reward_max\".format(prefix), tf.reduce_max(self.targets))\n tf.summary.scalar(\"{}/reward_min\".format(prefix), tf.reduce_min(self.targets))\n tf.summary.scalar(\"{}/reward_mean\".format(prefix), tf.reduce_mean(self.targets))\n tf.summary.histogram(\"{}/reward_targets\".format(prefix), self.targets)\n tf.summary.histogram(\"{}/values\".format(prefix), self.logits)\n\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)\n self.grads_and_vars = self.optimizer.compute_gradients(self.loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())\n\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if \"policy_net\" in s.name or \"shared\" in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"import tensorflow as tf\n\n\ndef build_shared_network(x, add_summaries=False):\n conv1 = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name='conv1')\n conv2 = tf.layers.conv2d(conv1, 32, 4, 2, activation=tf.nn.relu, name=\n 'conv2')\n fc1 = tf.layers.dense(tf.layers.flatten(conv2), 256, name='fc1')\n if add_summaries:\n tf.contrib.layers.summarize_activation(conv1)\n tf.contrib.layers.summarize_activation(conv2)\n tf.contrib.layers.summarize_activation(fc1)\n return fc1\n\n\nclass PolicyEstimator:\n\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\n 'actions')\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('policy_net'):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-08\n self.predictions = {'logits': self.logits, 'probs': self.probs}\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), \n 1, name='entropy')\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\n 'entropy_mean')\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1\n ] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1\n ]), gather_indices)\n self.losses = -(tf.log(self.picked_action_probs) * self.targets +\n 0.01 * self.entropy)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.\n name or 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name\n ]\n self.summaries = tf.summary.merge(summaries)\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n 
self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"<import token>\n\n\ndef build_shared_network(x, add_summaries=False):\n conv1 = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name='conv1')\n conv2 = tf.layers.conv2d(conv1, 32, 4, 2, activation=tf.nn.relu, name=\n 'conv2')\n fc1 = tf.layers.dense(tf.layers.flatten(conv2), 256, name='fc1')\n if add_summaries:\n tf.contrib.layers.summarize_activation(conv1)\n tf.contrib.layers.summarize_activation(conv2)\n tf.contrib.layers.summarize_activation(fc1)\n return fc1\n\n\nclass PolicyEstimator:\n\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\n 'actions')\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('policy_net'):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-08\n self.predictions = {'logits': self.logits, 'probs': self.probs}\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), \n 1, name='entropy')\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\n 'entropy_mean')\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1\n ] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1\n ]), gather_indices)\n self.losses = -(tf.log(self.picked_action_probs) * self.targets +\n 0.01 * self.entropy)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.\n name or 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name\n ]\n self.summaries = tf.summary.merge(summaries)\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n 
self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"<import token>\n<function token>\n\n\nclass PolicyEstimator:\n\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\n 'actions')\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('policy_net'):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-08\n self.predictions = {'logits': self.logits, 'probs': self.probs}\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), \n 1, name='entropy')\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\n 'entropy_mean')\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1\n ] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1\n ]), gather_indices)\n self.losses = -(tf.log(self.picked_action_probs) * self.targets +\n 0.01 * self.entropy)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.\n name or 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name\n ]\n self.summaries = tf.summary.merge(summaries)\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n 
tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"<import token>\n<function token>\n\n\nclass PolicyEstimator:\n <function token>\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"<import token>\n<function token>\n<class token>\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"<import token>\n<function token>\n<class token>\n\n\nclass ValueEstimator:\n <function token>\n",
"<import token>\n<function token>\n<class token>\n<class token>\n"
] | false |
848 |
4d63a5f09164b78faa731af6dce41969edc2c4f5
|
import datastructure
import wordUri
class Question:
def __init__(self, nlp, otter, nounArray, verbArray):
self.nlp = nlp
self.nounArray = nounArray
self.verbArray = verbArray
self.file = otter
def findFirst(self, sentence):
sentenceDoc = self.nlp(sentence)
for word in sentenceDoc:
if word.dep_ == "ROOT":
verb = self.verbArray.findWord(word.orth_)
children = []
for ch in word.children:
children.append(ch)
self.findSecond(sentenceDoc, verb, children)
break
def findSecond(self, sentenceDoc, verb, children):
for child in children:
if child.dep_ == "attr" or child.dep_ == "nsubj":
temp = self.nounArray.findWord(child.orth_)
subjectChildren = []
for ch in child.children:
subjectChildren.append(ch)
if not subjectChildren:
subjectChildren = children
subjectChildren.remove(child)
self.findThird(sentenceDoc, temp, verb, subjectChildren, False)
break
def findThird(self, sentenceDoc, subject, verb, children, flag):
for child in children:
if child.dep_ == "appos" or child.dep_ == "pobj":
temp = self.nounArray.findWord(child.orth_)
if temp is None:
w = datastructure.Word(child.orth_)
w.addType(child.pos_)
w.addUri(wordUri.findUri(w))
#w.addUri(w.word + "URI")
print(subject.uri, "- " + verb.uri + " -", w.uri)
self.writeOtter(subject.uri, verb.uri, w.uri)
else:
print(subject.uri, "- " + verb.uri + " -", temp.uri)
self.writeOtter(subject.uri, verb.uri, temp.uri)
#self.recoursiveFind(sentenceDoc, subject, verb, child)
if child.dep_ == "prep" or child.dep_ == "acomp":
if not flag:
verb = datastructure.Word(child.orth_)
verb.addType(child.pos_)
verb.addUri(wordUri.findUri(verb))
verbChildren = []
for ch in child.children:
verbChildren.append(ch)
self.findThird(sentenceDoc, subject, verb, verbChildren, True)
def writeOtter(self, first, second, third):
self.file.write("-rdf(\"" + first + "\", \"" + second + "\", \"" + third + "\").\n")
|
[
"import datastructure\nimport wordUri\n\n\nclass Question:\n def __init__(self, nlp, otter, nounArray, verbArray):\n self.nlp = nlp\n self.nounArray = nounArray\n self.verbArray = verbArray\n self.file = otter\n\n\n def findFirst(self, sentence):\n sentenceDoc = self.nlp(sentence)\n for word in sentenceDoc:\n if word.dep_ == \"ROOT\":\n verb = self.verbArray.findWord(word.orth_)\n\n children = []\n for ch in word.children:\n children.append(ch)\n self.findSecond(sentenceDoc, verb, children)\n break\n\n def findSecond(self, sentenceDoc, verb, children):\n\n for child in children:\n if child.dep_ == \"attr\" or child.dep_ == \"nsubj\":\n temp = self.nounArray.findWord(child.orth_)\n\n subjectChildren = []\n for ch in child.children:\n subjectChildren.append(ch)\n\n if not subjectChildren:\n subjectChildren = children\n subjectChildren.remove(child)\n self.findThird(sentenceDoc, temp, verb, subjectChildren, False)\n break\n\n def findThird(self, sentenceDoc, subject, verb, children, flag):\n for child in children:\n if child.dep_ == \"appos\" or child.dep_ == \"pobj\":\n temp = self.nounArray.findWord(child.orth_)\n if temp is None:\n w = datastructure.Word(child.orth_)\n w.addType(child.pos_)\n w.addUri(wordUri.findUri(w))\n #w.addUri(w.word + \"URI\")\n print(subject.uri, \"- \" + verb.uri + \" -\", w.uri)\n\n self.writeOtter(subject.uri, verb.uri, w.uri)\n\n else:\n print(subject.uri, \"- \" + verb.uri + \" -\", temp.uri)\n self.writeOtter(subject.uri, verb.uri, temp.uri)\n\n #self.recoursiveFind(sentenceDoc, subject, verb, child)\n if child.dep_ == \"prep\" or child.dep_ == \"acomp\":\n if not flag:\n verb = datastructure.Word(child.orth_)\n verb.addType(child.pos_)\n verb.addUri(wordUri.findUri(verb))\n\n verbChildren = []\n for ch in child.children:\n verbChildren.append(ch)\n\n self.findThird(sentenceDoc, subject, verb, verbChildren, True)\n\n def writeOtter(self, first, second, third):\n self.file.write(\"-rdf(\\\"\" + first + \"\\\", \\\"\" + second + \"\\\", \\\"\" + third + \"\\\").\\n\")\n",
"import datastructure\nimport wordUri\n\n\nclass Question:\n\n def __init__(self, nlp, otter, nounArray, verbArray):\n self.nlp = nlp\n self.nounArray = nounArray\n self.verbArray = verbArray\n self.file = otter\n\n def findFirst(self, sentence):\n sentenceDoc = self.nlp(sentence)\n for word in sentenceDoc:\n if word.dep_ == 'ROOT':\n verb = self.verbArray.findWord(word.orth_)\n children = []\n for ch in word.children:\n children.append(ch)\n self.findSecond(sentenceDoc, verb, children)\n break\n\n def findSecond(self, sentenceDoc, verb, children):\n for child in children:\n if child.dep_ == 'attr' or child.dep_ == 'nsubj':\n temp = self.nounArray.findWord(child.orth_)\n subjectChildren = []\n for ch in child.children:\n subjectChildren.append(ch)\n if not subjectChildren:\n subjectChildren = children\n subjectChildren.remove(child)\n self.findThird(sentenceDoc, temp, verb, subjectChildren, False)\n break\n\n def findThird(self, sentenceDoc, subject, verb, children, flag):\n for child in children:\n if child.dep_ == 'appos' or child.dep_ == 'pobj':\n temp = self.nounArray.findWord(child.orth_)\n if temp is None:\n w = datastructure.Word(child.orth_)\n w.addType(child.pos_)\n w.addUri(wordUri.findUri(w))\n print(subject.uri, '- ' + verb.uri + ' -', w.uri)\n self.writeOtter(subject.uri, verb.uri, w.uri)\n else:\n print(subject.uri, '- ' + verb.uri + ' -', temp.uri)\n self.writeOtter(subject.uri, verb.uri, temp.uri)\n if child.dep_ == 'prep' or child.dep_ == 'acomp':\n if not flag:\n verb = datastructure.Word(child.orth_)\n verb.addType(child.pos_)\n verb.addUri(wordUri.findUri(verb))\n verbChildren = []\n for ch in child.children:\n verbChildren.append(ch)\n self.findThird(sentenceDoc, subject, verb, verbChildren, True)\n\n def writeOtter(self, first, second, third):\n self.file.write('-rdf(\"' + first + '\", \"' + second + '\", \"' + third +\n '\").\\n')\n",
"<import token>\n\n\nclass Question:\n\n def __init__(self, nlp, otter, nounArray, verbArray):\n self.nlp = nlp\n self.nounArray = nounArray\n self.verbArray = verbArray\n self.file = otter\n\n def findFirst(self, sentence):\n sentenceDoc = self.nlp(sentence)\n for word in sentenceDoc:\n if word.dep_ == 'ROOT':\n verb = self.verbArray.findWord(word.orth_)\n children = []\n for ch in word.children:\n children.append(ch)\n self.findSecond(sentenceDoc, verb, children)\n break\n\n def findSecond(self, sentenceDoc, verb, children):\n for child in children:\n if child.dep_ == 'attr' or child.dep_ == 'nsubj':\n temp = self.nounArray.findWord(child.orth_)\n subjectChildren = []\n for ch in child.children:\n subjectChildren.append(ch)\n if not subjectChildren:\n subjectChildren = children\n subjectChildren.remove(child)\n self.findThird(sentenceDoc, temp, verb, subjectChildren, False)\n break\n\n def findThird(self, sentenceDoc, subject, verb, children, flag):\n for child in children:\n if child.dep_ == 'appos' or child.dep_ == 'pobj':\n temp = self.nounArray.findWord(child.orth_)\n if temp is None:\n w = datastructure.Word(child.orth_)\n w.addType(child.pos_)\n w.addUri(wordUri.findUri(w))\n print(subject.uri, '- ' + verb.uri + ' -', w.uri)\n self.writeOtter(subject.uri, verb.uri, w.uri)\n else:\n print(subject.uri, '- ' + verb.uri + ' -', temp.uri)\n self.writeOtter(subject.uri, verb.uri, temp.uri)\n if child.dep_ == 'prep' or child.dep_ == 'acomp':\n if not flag:\n verb = datastructure.Word(child.orth_)\n verb.addType(child.pos_)\n verb.addUri(wordUri.findUri(verb))\n verbChildren = []\n for ch in child.children:\n verbChildren.append(ch)\n self.findThird(sentenceDoc, subject, verb, verbChildren, True)\n\n def writeOtter(self, first, second, third):\n self.file.write('-rdf(\"' + first + '\", \"' + second + '\", \"' + third +\n '\").\\n')\n",
"<import token>\n\n\nclass Question:\n\n def __init__(self, nlp, otter, nounArray, verbArray):\n self.nlp = nlp\n self.nounArray = nounArray\n self.verbArray = verbArray\n self.file = otter\n\n def findFirst(self, sentence):\n sentenceDoc = self.nlp(sentence)\n for word in sentenceDoc:\n if word.dep_ == 'ROOT':\n verb = self.verbArray.findWord(word.orth_)\n children = []\n for ch in word.children:\n children.append(ch)\n self.findSecond(sentenceDoc, verb, children)\n break\n <function token>\n\n def findThird(self, sentenceDoc, subject, verb, children, flag):\n for child in children:\n if child.dep_ == 'appos' or child.dep_ == 'pobj':\n temp = self.nounArray.findWord(child.orth_)\n if temp is None:\n w = datastructure.Word(child.orth_)\n w.addType(child.pos_)\n w.addUri(wordUri.findUri(w))\n print(subject.uri, '- ' + verb.uri + ' -', w.uri)\n self.writeOtter(subject.uri, verb.uri, w.uri)\n else:\n print(subject.uri, '- ' + verb.uri + ' -', temp.uri)\n self.writeOtter(subject.uri, verb.uri, temp.uri)\n if child.dep_ == 'prep' or child.dep_ == 'acomp':\n if not flag:\n verb = datastructure.Word(child.orth_)\n verb.addType(child.pos_)\n verb.addUri(wordUri.findUri(verb))\n verbChildren = []\n for ch in child.children:\n verbChildren.append(ch)\n self.findThird(sentenceDoc, subject, verb, verbChildren, True)\n\n def writeOtter(self, first, second, third):\n self.file.write('-rdf(\"' + first + '\", \"' + second + '\", \"' + third +\n '\").\\n')\n",
"<import token>\n\n\nclass Question:\n <function token>\n\n def findFirst(self, sentence):\n sentenceDoc = self.nlp(sentence)\n for word in sentenceDoc:\n if word.dep_ == 'ROOT':\n verb = self.verbArray.findWord(word.orth_)\n children = []\n for ch in word.children:\n children.append(ch)\n self.findSecond(sentenceDoc, verb, children)\n break\n <function token>\n\n def findThird(self, sentenceDoc, subject, verb, children, flag):\n for child in children:\n if child.dep_ == 'appos' or child.dep_ == 'pobj':\n temp = self.nounArray.findWord(child.orth_)\n if temp is None:\n w = datastructure.Word(child.orth_)\n w.addType(child.pos_)\n w.addUri(wordUri.findUri(w))\n print(subject.uri, '- ' + verb.uri + ' -', w.uri)\n self.writeOtter(subject.uri, verb.uri, w.uri)\n else:\n print(subject.uri, '- ' + verb.uri + ' -', temp.uri)\n self.writeOtter(subject.uri, verb.uri, temp.uri)\n if child.dep_ == 'prep' or child.dep_ == 'acomp':\n if not flag:\n verb = datastructure.Word(child.orth_)\n verb.addType(child.pos_)\n verb.addUri(wordUri.findUri(verb))\n verbChildren = []\n for ch in child.children:\n verbChildren.append(ch)\n self.findThird(sentenceDoc, subject, verb, verbChildren, True)\n\n def writeOtter(self, first, second, third):\n self.file.write('-rdf(\"' + first + '\", \"' + second + '\", \"' + third +\n '\").\\n')\n",
"<import token>\n\n\nclass Question:\n <function token>\n\n def findFirst(self, sentence):\n sentenceDoc = self.nlp(sentence)\n for word in sentenceDoc:\n if word.dep_ == 'ROOT':\n verb = self.verbArray.findWord(word.orth_)\n children = []\n for ch in word.children:\n children.append(ch)\n self.findSecond(sentenceDoc, verb, children)\n break\n <function token>\n <function token>\n\n def writeOtter(self, first, second, third):\n self.file.write('-rdf(\"' + first + '\", \"' + second + '\", \"' + third +\n '\").\\n')\n",
"<import token>\n\n\nclass Question:\n <function token>\n <function token>\n <function token>\n <function token>\n\n def writeOtter(self, first, second, third):\n self.file.write('-rdf(\"' + first + '\", \"' + second + '\", \"' + third +\n '\").\\n')\n",
"<import token>\n\n\nclass Question:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
849 |
6e01e36170f3f08f2030dbd4dd91019936fb9f5c
|
# Copyright (c) 2020 Open Collector, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import logging
import typing
from urllib.parse import urlparse
from sqlalchemy.orm import exc as orm_exc
from starlette.exceptions import HTTPException
from starlette.responses import JSONResponse, RedirectResponse, Response
from starlette.routing import Router
from starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
from ...executor import async_
from ...middlewares import WithTemplates
from ...utils import ContextualHTTPEndpoint
from ..application import POOL_KEY
from ..models import AuxiliaryIdentityAttribute, UserPool
from ..utils import build_jwt_public_key_from_private_key
logger = logging.getLogger(__name__)
routes = Router()
class PoolHTTPEndpoint(ContextualHTTPEndpoint):
@property
def templates(self):
return lambda name, context={}, *args, **kwargs: (
typing.cast(WithTemplates, self.request).templates(
name,
{**context, "pool": self.request.scope.get(POOL_KEY)},
*args,
**kwargs,
)
)
@property
def pool(self) -> typing.Optional[UserPool]:
return typing.cast(typing.Optional[UserPool], self.request.get(POOL_KEY))
@property
def per_pool_session(self) -> typing.Dict[str, typing.Any]:
pool = self.pool
if pool is not None:
return self.request.scope["session"].setdefault(pool.key, {})
else:
return self.request.scope["session"]
async def dispatch(self):
if self.request.get(POOL_KEY) is None:
raise HTTPException(status_code=HTTP_404_NOT_FOUND)
await super().dispatch()
@property
def success_page_url(self):
return self.request.url_for("pools:signin_success", pool=self.pool.key)
def bool_val(v: typing.Optional[str]) -> bool:
return v not in ("false", "no", "0", None)
@routes.route("/signin", name="signin")
class SigninEndpoint(PoolHTTPEndpoint):
template = "pools/signin.html"
@property
def back_to(self) -> typing.Optional[str]:
return self.request.session.get("back_to")
@back_to.setter
def back_to(self, value: typing.Optional[str]):
self.request.session["back_to"] = value
def render_template(self, context: typing.Dict[str, typing.Any] = {}) -> Response:
assert self.pool is not None
if self.pool.username_attributes:
email = AuxiliaryIdentityAttribute.EMAIL in self.pool.username_attributes
phone_number = (
AuxiliaryIdentityAttribute.PHONE_NUMBER in self.pool.username_attributes
)
if email and phone_number:
label = "E-mail address or phone number"
elif email:
label = "E-mail address"
elif phone_number:
label = "Phone number"
else:
raise AssertionError()
else:
label = "User name"
context["username_label"] = label
return self.templates(self.template, context=context)
async def get(self):
assert self.pool is not None
back_to = self.request.query_params.get("back_to")
reauth = bool_val(self.request.query_params.get("reauth"))
if self.request.user.is_authenticated and not reauth:
return RedirectResponse(back_to or self.success_page_url)
parsed_back_to = urlparse(back_to)
if (
parsed_back_to.scheme and parsed_back_to.scheme != self.request.url.scheme
) or (
parsed_back_to.hostname
and parsed_back_to.hostname != self.request.url.hostname
):
raise HTTPException(status_code=HTTP_400_BAD_REQUEST)
if back_to is not None:
self.back_to = back_to
return self.render_template(context={"form": {"reauth": reauth}})
async def post(self):
assert self.pool is not None
form = await self.request.form()
try:
user = await async_(lambda: self.pool.query_user(form["username"]).one())()
self.request.app.state.kdf.verify(user.password, form["password"])
except Exception as e:
logger.debug(f"failed login attempt: {form['username']} - {e!r}")
return self.render_template(
context={
"form": form,
"alerts": ["No user registered with that user name and password."],
}
)
self.per_pool_session["user_id"] = user.id
return RedirectResponse(self.back_to or self.success_page_url, status_code=302)
@routes.route("/signin/success", name="signin_success")
class SignedinEndpoint(PoolHTTPEndpoint):
template = "pools/signin_success.html"
async def get(self):
return self.templates(self.template)
@routes.route("/signout", name="signout", methods=["post"])
class SignOutEndpoint(PoolHTTPEndpoint):
async def post(self):
form = await self.request.form()
client_id = form.get("client_id")
try:
client = await async_(
self.pool.clients.filter_by(oauth2_client_id=client_id).one
)()
except orm_exc.NoResultFound as e:
raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e
back_to = form.get("back_to")
if back_to is None or back_to not in client.logout_uris:
back_to = self.request.url_for("pools:signout_success", pool=self.pool.key)
if self.request.user.is_authenticated:
del self.per_pool_session["user_id"]
return RedirectResponse(back_to, status_code=302)
@routes.route("/signout/success", name="signout_success")
class SignedOutEndpoint(PoolHTTPEndpoint):
async def get(self):
return self.templates("pools/signout_success.html")
@routes.route("/", name="index")
class IndexEndpoint(PoolHTTPEndpoint):
async def get(self):
return self.templates("pools/index.html")
@routes.route("/.well-known/jwks.json", name="signin_success")
class JWKSEndpoint(PoolHTTPEndpoint):
async def get(self):
keys = []
if isinstance(self.request.app.state.jwt_config.key, dict):
public_jwk = build_jwt_public_key_from_private_key(
self.request.app.state.jwt_config.key
)
public_jwk["use"] = "sig"
keys.append(public_jwk)
return JSONResponse(
{
"keys": keys,
}
)
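# A minimal mounting sketch, assuming this Router is namespaced as "pools" (the
# url_for calls above use "pools:..." route names) and that session, templating and
# POOL_KEY-resolving middleware are wired up elsewhere in the application; the
# "/pools/{pool}" mount path and the build_example_app helper name are illustrative
# assumptions, not part of this module.
def build_example_app():
    from starlette.applications import Starlette
    from starlette.routing import Mount
    # Mounting the Router under a named mount lets request.url_for resolve
    # "pools:signin_success" etc. with the {pool} path parameter.
    return Starlette(routes=[Mount("/pools/{pool}", app=routes, name="pools")])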
|
[
"# Copyright (c) 2020 Open Collector, Inc.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to\n# deal in the Software without restriction, including without limitation the\n# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n# sell copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n# DEALINGS IN THE SOFTWARE.\n\nimport logging\nimport typing\nfrom urllib.parse import urlparse\n\nfrom sqlalchemy.orm import exc as orm_exc\nfrom starlette.exceptions import HTTPException\nfrom starlette.responses import JSONResponse, RedirectResponse, Response\nfrom starlette.routing import Router\nfrom starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND\n\nfrom ...executor import async_\nfrom ...middlewares import WithTemplates\nfrom ...utils import ContextualHTTPEndpoint\nfrom ..application import POOL_KEY\nfrom ..models import AuxiliaryIdentityAttribute, UserPool\nfrom ..utils import build_jwt_public_key_from_private_key\n\nlogger = logging.getLogger(__name__)\nroutes = Router()\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: (\n typing.cast(WithTemplates, self.request).templates(\n name,\n {**context, \"pool\": self.request.scope.get(POOL_KEY)},\n *args,\n **kwargs,\n )\n )\n\n @property\n def pool(self) -> typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(POOL_KEY))\n\n @property\n def per_pool_session(self) -> typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope[\"session\"].setdefault(pool.key, {})\n else:\n return self.request.scope[\"session\"]\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for(\"pools:signin_success\", pool=self.pool.key)\n\n\ndef bool_val(v: typing.Optional[str]) -> bool:\n return v not in (\"false\", \"no\", \"0\", None)\n\n\[email protected](\"/signin\", name=\"signin\")\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = \"pools/signin.html\"\n\n @property\n def back_to(self) -> typing.Optional[str]:\n return self.request.session.get(\"back_to\")\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session[\"back_to\"] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any] = {}) -> Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = AuxiliaryIdentityAttribute.EMAIL in self.pool.username_attributes\n phone_number = (\n AuxiliaryIdentityAttribute.PHONE_NUMBER in self.pool.username_attributes\n )\n if email and phone_number:\n label = 
\"E-mail address or phone number\"\n elif email:\n label = \"E-mail address\"\n elif phone_number:\n label = \"Phone number\"\n else:\n raise AssertionError()\n else:\n label = \"User name\"\n context[\"username_label\"] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get(\"back_to\")\n reauth = bool_val(self.request.query_params.get(\"reauth\"))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (\n parsed_back_to.scheme and parsed_back_to.scheme != self.request.url.scheme\n ) or (\n parsed_back_to.hostname\n and parsed_back_to.hostname != self.request.url.hostname\n ):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={\"form\": {\"reauth\": reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda: self.pool.query_user(form[\"username\"]).one())()\n self.request.app.state.kdf.verify(user.password, form[\"password\"])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(\n context={\n \"form\": form,\n \"alerts\": [\"No user registered with that user name and password.\"],\n }\n )\n self.per_pool_session[\"user_id\"] = user.id\n return RedirectResponse(self.back_to or self.success_page_url, status_code=302)\n\n\[email protected](\"/signin/success\", name=\"signin_success\")\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = \"pools/signin_success.html\"\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected](\"/signout\", name=\"signout\", methods=[\"post\"])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n async def post(self):\n form = await self.request.form()\n client_id = form.get(\"client_id\")\n try:\n client = await async_(\n self.pool.clients.filter_by(oauth2_client_id=client_id).one\n )()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get(\"back_to\")\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for(\"pools:signout_success\", pool=self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session[\"user_id\"]\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected](\"/signout/success\", name=\"signout_success\")\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n async def get(self):\n return self.templates(\"pools/signout_success.html\")\n\n\[email protected](\"/\", name=\"index\")\nclass IndexEndpoint(PoolHTTPEndpoint):\n async def get(self):\n return self.templates(\"pools/index.html\")\n\n\[email protected](\"/.well-known/jwks.json\", name=\"signin_success\")\nclass JWKSEndpoint(PoolHTTPEndpoint):\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(\n self.request.app.state.jwt_config.key\n )\n public_jwk[\"use\"] = \"sig\"\n keys.append(public_jwk)\n return JSONResponse(\n {\n \"keys\": keys,\n }\n )\n",
"import logging\nimport typing\nfrom urllib.parse import urlparse\nfrom sqlalchemy.orm import exc as orm_exc\nfrom starlette.exceptions import HTTPException\nfrom starlette.responses import JSONResponse, RedirectResponse, Response\nfrom starlette.routing import Router\nfrom starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND\nfrom ...executor import async_\nfrom ...middlewares import WithTemplates\nfrom ...utils import ContextualHTTPEndpoint\nfrom ..application import POOL_KEY\nfrom ..models import AuxiliaryIdentityAttribute, UserPool\nfrom ..utils import build_jwt_public_key_from_private_key\nlogger = logging.getLogger(__name__)\nroutes = Router()\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: typing.cast(\n WithTemplates, self.request).templates(name, {**context, 'pool':\n self.request.scope.get(POOL_KEY)}, *args, **kwargs)\n\n @property\n def pool(self) ->typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(\n POOL_KEY))\n\n @property\n def per_pool_session(self) ->typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope['session'].setdefault(pool.key, {})\n else:\n return self.request.scope['session']\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\ndef bool_val(v: typing.Optional[str]) ->bool:\n return v not in ('false', 'no', '0', None)\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n 
self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\nlogger = logging.getLogger(__name__)\nroutes = Router()\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: typing.cast(\n WithTemplates, self.request).templates(name, {**context, 'pool':\n self.request.scope.get(POOL_KEY)}, *args, **kwargs)\n\n @property\n def pool(self) ->typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(\n POOL_KEY))\n\n @property\n def per_pool_session(self) ->typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope['session'].setdefault(pool.key, {})\n else:\n return self.request.scope['session']\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\ndef bool_val(v: typing.Optional[str]) ->bool:\n return v not in ('false', 'no', '0', None)\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email 
protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: typing.cast(\n WithTemplates, self.request).templates(name, {**context, 'pool':\n self.request.scope.get(POOL_KEY)}, *args, **kwargs)\n\n @property\n def pool(self) ->typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(\n POOL_KEY))\n\n @property\n def per_pool_session(self) ->typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope['session'].setdefault(pool.key, {})\n else:\n return self.request.scope['session']\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\ndef bool_val(v: typing.Optional[str]) ->bool:\n return v not in ('false', 'no', '0', None)\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', 
name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: typing.cast(\n WithTemplates, self.request).templates(name, {**context, 'pool':\n self.request.scope.get(POOL_KEY)}, *args, **kwargs)\n\n @property\n def pool(self) ->typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(\n POOL_KEY))\n\n @property\n def per_pool_session(self) ->typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope['session'].setdefault(pool.key, {})\n else:\n return self.request.scope['session']\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async 
def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: typing.cast(\n WithTemplates, self.request).templates(name, {**context, 'pool':\n self.request.scope.get(POOL_KEY)}, *args, **kwargs)\n\n @property\n def pool(self) ->typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(\n POOL_KEY))\n <function token>\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except 
orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n <function token>\n\n @property\n def pool(self) ->typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(\n POOL_KEY))\n <function token>\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = 
self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n <function token>\n <function token>\n <function token>\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del 
self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n <function token>\n <function token>\n <function token>\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n <function token>\n\n\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', 
name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return 
self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n <assignment token>\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return 
self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n <assignment token>\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n <function token>\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass 
JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n <assignment token>\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n <function token>\n <function token>\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n\n\[email protected]('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n<class token>\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n<class token>\n\n\[email protected]('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n <assignment token>\n\n async def get(self):\n return self.templates(self.template)\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n<class token>\n<class token>\n\n\[email protected]('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\[email protected]('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\[email protected]('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\[email protected]('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n",
"<import token>\n<assignment token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
850 |
87504fb88cbbf810ad8bab08bc59284d2cf37cce
|
class Solution(object):
    def findDisappearedNumbers(self, nums):
        """
        :type nums: List[int]
        :rtype: List[int]
        """
        # First approach: mark seen values in an auxiliary O(n) array.
        ns = [0] * len(nums)
        for i in range(0, len(nums), 1):
            ns[nums[i] - 1] = 1
        ret = []
        for j in range(0, len(ns), 1):
            if ns[j] == 0:
                ret.append(j + 1)
        return ret


class Solution(object):
    # This second definition shadows the one above; it is the O(1) extra-space variant.
    def findDisappearedNumbers(self, nums):
        """
        :type nums: List[int]
        :rtype: List[int]
        """
        # Second approach: flag a seen value v in place by negating nums[v - 1].
        for i in range(0, len(nums), 1):
            index = abs(nums[i]) - 1
            nums[index] = -abs(nums[index])
        return [i + 1 for i in range(0, len(nums), 1) if nums[i] > 0]
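A minimal usage sketch for the record above; the sample input is illustrative and not part of the original code. Because the second class Solution definition shadows the first, only the in-place sign-marking variant runs here.

# Hypothetical quick check of the shadowing (in-place) solution.
if __name__ == "__main__":
    sample = [4, 3, 2, 7, 8, 2, 3, 1]
    print(Solution().findDisappearedNumbers(sample))  # expected output: [5, 6]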
|
[
"class Solution(object):\n def findDisappearedNumbers(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: List[int]\n \"\"\"\n ns = [0]*len(nums)\n for i in range(0, len(nums), 1):\n ns[nums[i]-1] = 1\n \n ret = []\n for j in range(0, len(ns), 1):\n if(ns[j] == 0): ret.append(j+1)\n return ret\n\nclass Solution(object):\n def findDisappearedNumbers(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: List[int]\n \"\"\"\n for i in range(0, len(nums), 1):\n index = abs(nums[i]) - 1\n nums[index] = - abs(nums[index])\n\n return [i + 1 for i in range(0, len(nums), 1) if nums[i] > 0]",
"class Solution(object):\n\n def findDisappearedNumbers(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: List[int]\n \"\"\"\n ns = [0] * len(nums)\n for i in range(0, len(nums), 1):\n ns[nums[i] - 1] = 1\n ret = []\n for j in range(0, len(ns), 1):\n if ns[j] == 0:\n ret.append(j + 1)\n return ret\n\n\nclass Solution(object):\n\n def findDisappearedNumbers(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: List[int]\n \"\"\"\n for i in range(0, len(nums), 1):\n index = abs(nums[i]) - 1\n nums[index] = -abs(nums[index])\n return [(i + 1) for i in range(0, len(nums), 1) if nums[i] > 0]\n",
"class Solution(object):\n <function token>\n\n\nclass Solution(object):\n\n def findDisappearedNumbers(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: List[int]\n \"\"\"\n for i in range(0, len(nums), 1):\n index = abs(nums[i]) - 1\n nums[index] = -abs(nums[index])\n return [(i + 1) for i in range(0, len(nums), 1) if nums[i] > 0]\n",
"<class token>\n\n\nclass Solution(object):\n\n def findDisappearedNumbers(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: List[int]\n \"\"\"\n for i in range(0, len(nums), 1):\n index = abs(nums[i]) - 1\n nums[index] = -abs(nums[index])\n return [(i + 1) for i in range(0, len(nums), 1) if nums[i] > 0]\n",
"<class token>\n\n\nclass Solution(object):\n <function token>\n",
"<class token>\n<class token>\n"
] | false |
851 |
a7add26a919a41e52ae41c6b4c4079eadaa8aa1d
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-16 12:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0036_auto_20180516_1818'),
]
operations = [
migrations.AddField(
model_name='promotion',
name='image',
field=models.ImageField(default=1, upload_to='images/promotion', verbose_name='Image 1318x790'),
preserve_default=False,
),
]
|
[
"# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2018-05-16 12:24\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('main', '0036_auto_20180516_1818'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='promotion',\n name='image',\n field=models.ImageField(default=1, upload_to='images/promotion', verbose_name='Image 1318x790'),\n preserve_default=False,\n ),\n ]\n",
"from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('main', '0036_auto_20180516_1818')]\n operations = [migrations.AddField(model_name='promotion', name='image',\n field=models.ImageField(default=1, upload_to='images/promotion',\n verbose_name='Image 1318x790'), preserve_default=False)]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('main', '0036_auto_20180516_1818')]\n operations = [migrations.AddField(model_name='promotion', name='image',\n field=models.ImageField(default=1, upload_to='images/promotion',\n verbose_name='Image 1318x790'), preserve_default=False)]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
852 |
a52743fc911beb7e51644073131b25c177d4ad29
|
import brainlit.algorithms.generate_fragments
from brainlit.algorithms.generate_fragments import *
|
[
"import brainlit.algorithms.generate_fragments\n\nfrom brainlit.algorithms.generate_fragments import *\n",
"import brainlit.algorithms.generate_fragments\nfrom brainlit.algorithms.generate_fragments import *\n",
"<import token>\n"
] | false |
853 |
44a9bb4d74d2e694f252d8726647bca13baa4df5
|
import tornado.ioloop
import tornado.web
import tornado.escape  # used by SubmitHandler.post via tornado.escape.json_decode
import json
import utils
class BaseHandler(tornado.web.RequestHandler):
def set_default_headers(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "x-requested-with")
class CondaHandler(BaseHandler):
def get(self, filePath):
with open("packages/conda/" + filePath) as f:
data = json.load(f)
condaPackages = {}
packages = data["packages"]
for package in packages:
name = packages[package]["name"]
version = packages[package]["version"]
            try:
                # Append only new versions for a package name we have already seen.
                if version not in condaPackages[name]["versions"]:
                    condaPackages[name]["versions"].append(version)
            except KeyError:
                # First time this package name appears.
                condaPackages[name] = {"versions": [version]}
self.write(json.dumps(condaPackages))
class SubmitHandler(BaseHandler):
def post(self):
data = tornado.escape.json_decode(self.request.body)
print(data)
folderPath = str(data['id'])
utils.mkdir(folderPath)
self.write('testing')
def make_app():
return tornado.web.Application([
(r"/packages/(.*)", CondaHandler),
(r"/submit", SubmitHandler)
])
if __name__ == "__main__":
app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
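The utils module imported above is not included in this record; a minimal stand-in consistent with the single utils.mkdir(folderPath) call might look like the sketch below (the behaviour is an assumption, not taken from the original source).

# utils.py -- hypothetical stand-in for the missing module in this record.
import os

def mkdir(path):
    # Create the submission folder, ignoring the call if it already exists.
    os.makedirs(str(path), exist_ok=True)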
|
[
"import tornado.ioloop\nimport tornado.web\nimport json\nimport utils\n\nclass BaseHandler(tornado.web.RequestHandler):\n def set_default_headers(self):\n self.set_header(\"Access-Control-Allow-Origin\", \"*\")\n self.set_header(\"Access-Control-Allow-Headers\", \"x-requested-with\")\n\nclass CondaHandler(BaseHandler):\n def get(self, filePath):\n with open(\"packages/conda/\" + filePath) as f:\n data = json.load(f)\n\n condaPackages = {}\n packages = data[\"packages\"]\n\n for package in packages:\n name = packages[package][\"name\"]\n version = packages[package][\"version\"]\n\n try:\n if version not in condaPackages[name][\"versions\"]:\n condaPackages[name][\"versions\"].append(version)\n except:\n condaPackages[name] = {\n \"versions\": [\n version\n ]\n }\n self.write(json.dumps(condaPackages))\n\nclass SubmitHandler(BaseHandler):\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\ndef make_app():\n return tornado.web.Application([\n (r\"/packages/(.*)\", CondaHandler),\n (r\"/submit\", SubmitHandler)\n ])\n\nif __name__ == \"__main__\":\n app = make_app()\n app.listen(8888)\n tornado.ioloop.IOLoop.current().start()",
"import tornado.ioloop\nimport tornado.web\nimport json\nimport utils\n\n\nclass BaseHandler(tornado.web.RequestHandler):\n\n def set_default_headers(self):\n self.set_header('Access-Control-Allow-Origin', '*')\n self.set_header('Access-Control-Allow-Headers', 'x-requested-with')\n\n\nclass CondaHandler(BaseHandler):\n\n def get(self, filePath):\n with open('packages/conda/' + filePath) as f:\n data = json.load(f)\n condaPackages = {}\n packages = data['packages']\n for package in packages:\n name = packages[package]['name']\n version = packages[package]['version']\n try:\n if version not in condaPackages[name]['versions']:\n condaPackages[name]['versions'].append(version)\n except:\n condaPackages[name] = {'versions': [version]}\n self.write(json.dumps(condaPackages))\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\ndef make_app():\n return tornado.web.Application([('/packages/(.*)', CondaHandler), (\n '/submit', SubmitHandler)])\n\n\nif __name__ == '__main__':\n app = make_app()\n app.listen(8888)\n tornado.ioloop.IOLoop.current().start()\n",
"<import token>\n\n\nclass BaseHandler(tornado.web.RequestHandler):\n\n def set_default_headers(self):\n self.set_header('Access-Control-Allow-Origin', '*')\n self.set_header('Access-Control-Allow-Headers', 'x-requested-with')\n\n\nclass CondaHandler(BaseHandler):\n\n def get(self, filePath):\n with open('packages/conda/' + filePath) as f:\n data = json.load(f)\n condaPackages = {}\n packages = data['packages']\n for package in packages:\n name = packages[package]['name']\n version = packages[package]['version']\n try:\n if version not in condaPackages[name]['versions']:\n condaPackages[name]['versions'].append(version)\n except:\n condaPackages[name] = {'versions': [version]}\n self.write(json.dumps(condaPackages))\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\ndef make_app():\n return tornado.web.Application([('/packages/(.*)', CondaHandler), (\n '/submit', SubmitHandler)])\n\n\nif __name__ == '__main__':\n app = make_app()\n app.listen(8888)\n tornado.ioloop.IOLoop.current().start()\n",
"<import token>\n\n\nclass BaseHandler(tornado.web.RequestHandler):\n\n def set_default_headers(self):\n self.set_header('Access-Control-Allow-Origin', '*')\n self.set_header('Access-Control-Allow-Headers', 'x-requested-with')\n\n\nclass CondaHandler(BaseHandler):\n\n def get(self, filePath):\n with open('packages/conda/' + filePath) as f:\n data = json.load(f)\n condaPackages = {}\n packages = data['packages']\n for package in packages:\n name = packages[package]['name']\n version = packages[package]['version']\n try:\n if version not in condaPackages[name]['versions']:\n condaPackages[name]['versions'].append(version)\n except:\n condaPackages[name] = {'versions': [version]}\n self.write(json.dumps(condaPackages))\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\ndef make_app():\n return tornado.web.Application([('/packages/(.*)', CondaHandler), (\n '/submit', SubmitHandler)])\n\n\n<code token>\n",
"<import token>\n\n\nclass BaseHandler(tornado.web.RequestHandler):\n\n def set_default_headers(self):\n self.set_header('Access-Control-Allow-Origin', '*')\n self.set_header('Access-Control-Allow-Headers', 'x-requested-with')\n\n\nclass CondaHandler(BaseHandler):\n\n def get(self, filePath):\n with open('packages/conda/' + filePath) as f:\n data = json.load(f)\n condaPackages = {}\n packages = data['packages']\n for package in packages:\n name = packages[package]['name']\n version = packages[package]['version']\n try:\n if version not in condaPackages[name]['versions']:\n condaPackages[name]['versions'].append(version)\n except:\n condaPackages[name] = {'versions': [version]}\n self.write(json.dumps(condaPackages))\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\n<function token>\n<code token>\n",
"<import token>\n\n\nclass BaseHandler(tornado.web.RequestHandler):\n <function token>\n\n\nclass CondaHandler(BaseHandler):\n\n def get(self, filePath):\n with open('packages/conda/' + filePath) as f:\n data = json.load(f)\n condaPackages = {}\n packages = data['packages']\n for package in packages:\n name = packages[package]['name']\n version = packages[package]['version']\n try:\n if version not in condaPackages[name]['versions']:\n condaPackages[name]['versions'].append(version)\n except:\n condaPackages[name] = {'versions': [version]}\n self.write(json.dumps(condaPackages))\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\n<function token>\n<code token>\n",
"<import token>\n<class token>\n\n\nclass CondaHandler(BaseHandler):\n\n def get(self, filePath):\n with open('packages/conda/' + filePath) as f:\n data = json.load(f)\n condaPackages = {}\n packages = data['packages']\n for package in packages:\n name = packages[package]['name']\n version = packages[package]['version']\n try:\n if version not in condaPackages[name]['versions']:\n condaPackages[name]['versions'].append(version)\n except:\n condaPackages[name] = {'versions': [version]}\n self.write(json.dumps(condaPackages))\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\n<function token>\n<code token>\n",
"<import token>\n<class token>\n\n\nclass CondaHandler(BaseHandler):\n <function token>\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\n<function token>\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass SubmitHandler(BaseHandler):\n\n def post(self):\n data = tornado.escape.json_decode(self.request.body)\n print(data)\n folderPath = str(data['id'])\n utils.mkdir(folderPath)\n self.write('testing')\n\n\n<function token>\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass SubmitHandler(BaseHandler):\n <function token>\n\n\n<function token>\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<function token>\n<code token>\n"
] | false |
854 |
ba8cb18544e4ded8b229bfb9cc4b28599119414f
|
"""MPI-supported kernels for computing diffusion flux in 2D."""
from sopht.numeric.eulerian_grid_ops.stencil_ops_2d import (
gen_diffusion_flux_pyst_kernel_2d,
gen_set_fixed_val_pyst_kernel_2d,
)
from sopht_mpi.utils.mpi_utils import check_valid_ghost_size_and_kernel_support
from mpi4py import MPI
def gen_diffusion_flux_pyst_mpi_kernel_2d(
real_t, mpi_construct, ghost_exchange_communicator
):
    # Note: currently these are generated for arbitrary-size arrays; we can optimise this
    # further by generating fixed-size kernels for the interior stencil and arbitrary-size
    # kernels for the boundary crunching.
diffusion_flux_pyst_kernel = gen_diffusion_flux_pyst_kernel_2d(
real_t=real_t, reset_ghost_zone=False
)
kernel_support = 1
    # define this here so that ghost size and kernel support are checked during
    # the generation phase itself
gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support = kernel_support
check_valid_ghost_size_and_kernel_support(
ghost_size=ghost_exchange_communicator.ghost_size,
kernel_support=gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support,
)
# for setting values at physical domain boundary
y_next, x_next = mpi_construct.next_grid_along
y_previous, x_previous = mpi_construct.previous_grid_along
set_fixed_val_kernel_2d = gen_set_fixed_val_pyst_kernel_2d(real_t=real_t)
def diffusion_flux_pyst_mpi_kernel_2d(
diffusion_flux,
field,
prefactor,
):
# define kernel support for kernel
diffusion_flux_pyst_mpi_kernel_2d.kernel_support = (
gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support
)
# define variable for use later
ghost_size = ghost_exchange_communicator.ghost_size
# begin ghost comm.
ghost_exchange_communicator.exchange_scalar_field_init(field)
# crunch interior stencil
diffusion_flux_pyst_kernel(
diffusion_flux=diffusion_flux[
ghost_size:-ghost_size, ghost_size:-ghost_size
],
field=field[ghost_size:-ghost_size, ghost_size:-ghost_size],
prefactor=prefactor,
)
# finalise ghost comm.
ghost_exchange_communicator.exchange_finalise()
# crunch boundary numbers
# NOTE: we pass in arrays of width 3 * kernel support size because the
# interior stencil computation leaves out a width of kernel_support.
# Since the support needed by the kernel is kernel_support on each side,
# we need to pass an array of width 3 * kernel_support, starting from
# index +/-(ghost_size - kernel_support) on the lower and upper end.
# Pystencils then automatically sets the kernel comp. bounds and
# crunches numbers in the kernel_support thickness zone at the boundary.
# Start of Y axis
diffusion_flux_pyst_kernel(
diffusion_flux=diffusion_flux[
ghost_size - kernel_support : ghost_size + 2 * kernel_support,
ghost_size:-ghost_size,
],
field=field[
ghost_size - kernel_support : ghost_size + 2 * kernel_support,
ghost_size:-ghost_size,
],
prefactor=prefactor,
)
# End of Y axis
diffusion_flux_pyst_kernel(
diffusion_flux=diffusion_flux[
-(ghost_size + 2 * kernel_support) : field.shape[0]
- (ghost_size - kernel_support),
ghost_size:-ghost_size,
],
field=field[
-(ghost_size + 2 * kernel_support) : field.shape[0]
- (ghost_size - kernel_support),
ghost_size:-ghost_size,
],
prefactor=prefactor,
)
# Start of X axis
diffusion_flux_pyst_kernel(
diffusion_flux=diffusion_flux[
:,
ghost_size - kernel_support : ghost_size + 2 * kernel_support,
],
field=field[
:,
ghost_size - kernel_support : ghost_size + 2 * kernel_support,
],
prefactor=prefactor,
)
# End of X axis
diffusion_flux_pyst_kernel(
diffusion_flux=diffusion_flux[
:,
-(ghost_size + 2 * kernel_support) : field.shape[1]
- (ghost_size - kernel_support),
],
field=field[
:,
-(ghost_size + 2 * kernel_support) : field.shape[1]
- (ghost_size - kernel_support),
],
prefactor=prefactor,
)
        # Set the physical domain boundary diffusion flux to zero, based on the neighboring block
boundary_width = 1
if x_previous == MPI.PROC_NULL:
set_fixed_val_kernel_2d(
field=diffusion_flux[:, : ghost_size + boundary_width],
fixed_val=0.0,
)
if x_next == MPI.PROC_NULL:
set_fixed_val_kernel_2d(
field=diffusion_flux[:, -ghost_size - boundary_width :],
fixed_val=0.0,
)
if y_previous == MPI.PROC_NULL:
set_fixed_val_kernel_2d(
field=diffusion_flux[: ghost_size + boundary_width, :],
fixed_val=0.0,
)
if y_next == MPI.PROC_NULL:
set_fixed_val_kernel_2d(
field=diffusion_flux[-ghost_size - boundary_width :, :],
fixed_val=0.0,
)
return diffusion_flux_pyst_mpi_kernel_2d
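To make the boundary-band widths above concrete, a small illustrative check (ghost_size = 2, kernel_support = 1 and the grid extent are assumptions chosen for the example, not values from the record): the interior pass covers [ghost_size:-ghost_size], and each boundary pass receives a band of width 3 * kernel_support starting at ghost_size - kernel_support, so the kernel_support-thick strip skipped by the interior pass is recomputed with valid neighbours on both sides.

# Slice arithmetic only; the numbers here are illustrative assumptions.
ghost_size, kernel_support, n = 2, 1, 16  # n = padded grid extent along one axis
interior = slice(ghost_size, n - ghost_size)  # rows 2..13
start_band = slice(ghost_size - kernel_support,
                   ghost_size + 2 * kernel_support)  # rows 1..3 (width 3 * kernel_support)
end_band = slice(n - (ghost_size + 2 * kernel_support),
                 n - (ghost_size - kernel_support))  # rows 12..14 (width 3 * kernel_support)
print(interior, start_band, end_band)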
|
[
"\"\"\"MPI-supported kernels for computing diffusion flux in 2D.\"\"\"\nfrom sopht.numeric.eulerian_grid_ops.stencil_ops_2d import (\n gen_diffusion_flux_pyst_kernel_2d,\n gen_set_fixed_val_pyst_kernel_2d,\n)\nfrom sopht_mpi.utils.mpi_utils import check_valid_ghost_size_and_kernel_support\nfrom mpi4py import MPI\n\n\ndef gen_diffusion_flux_pyst_mpi_kernel_2d(\n real_t, mpi_construct, ghost_exchange_communicator\n):\n # Note currently I'm generating these for arbit size arrays, we ca optimise this\n # more by generating fixed size for the interior stencil and arbit size for\n # boundary crunching\n diffusion_flux_pyst_kernel = gen_diffusion_flux_pyst_kernel_2d(\n real_t=real_t, reset_ghost_zone=False\n )\n kernel_support = 1\n # define this here so that ghost size and kernel support is checked during\n # generation phase itself\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support = kernel_support\n check_valid_ghost_size_and_kernel_support(\n ghost_size=ghost_exchange_communicator.ghost_size,\n kernel_support=gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support,\n )\n\n # for setting values at physical domain boundary\n y_next, x_next = mpi_construct.next_grid_along\n y_previous, x_previous = mpi_construct.previous_grid_along\n set_fixed_val_kernel_2d = gen_set_fixed_val_pyst_kernel_2d(real_t=real_t)\n\n def diffusion_flux_pyst_mpi_kernel_2d(\n diffusion_flux,\n field,\n prefactor,\n ):\n # define kernel support for kernel\n diffusion_flux_pyst_mpi_kernel_2d.kernel_support = (\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support\n )\n # define variable for use later\n ghost_size = ghost_exchange_communicator.ghost_size\n # begin ghost comm.\n ghost_exchange_communicator.exchange_scalar_field_init(field)\n\n # crunch interior stencil\n diffusion_flux_pyst_kernel(\n diffusion_flux=diffusion_flux[\n ghost_size:-ghost_size, ghost_size:-ghost_size\n ],\n field=field[ghost_size:-ghost_size, ghost_size:-ghost_size],\n prefactor=prefactor,\n )\n # finalise ghost comm.\n ghost_exchange_communicator.exchange_finalise()\n\n # crunch boundary numbers\n # NOTE: we pass in arrays of width 3 * kernel support size because the\n # interior stencil computation leaves out a width of kernel_support.\n # Since the support needed by the kernel is kernel_support on each side,\n # we need to pass an array of width 3 * kernel_support, starting from\n # index +/-(ghost_size - kernel_support) on the lower and upper end.\n # Pystencils then automatically sets the kernel comp. 
bounds and\n # crunches numbers in the kernel_support thickness zone at the boundary.\n # Start of Y axis\n diffusion_flux_pyst_kernel(\n diffusion_flux=diffusion_flux[\n ghost_size - kernel_support : ghost_size + 2 * kernel_support,\n ghost_size:-ghost_size,\n ],\n field=field[\n ghost_size - kernel_support : ghost_size + 2 * kernel_support,\n ghost_size:-ghost_size,\n ],\n prefactor=prefactor,\n )\n # End of Y axis\n diffusion_flux_pyst_kernel(\n diffusion_flux=diffusion_flux[\n -(ghost_size + 2 * kernel_support) : field.shape[0]\n - (ghost_size - kernel_support),\n ghost_size:-ghost_size,\n ],\n field=field[\n -(ghost_size + 2 * kernel_support) : field.shape[0]\n - (ghost_size - kernel_support),\n ghost_size:-ghost_size,\n ],\n prefactor=prefactor,\n )\n # Start of X axis\n diffusion_flux_pyst_kernel(\n diffusion_flux=diffusion_flux[\n :,\n ghost_size - kernel_support : ghost_size + 2 * kernel_support,\n ],\n field=field[\n :,\n ghost_size - kernel_support : ghost_size + 2 * kernel_support,\n ],\n prefactor=prefactor,\n )\n # End of X axis\n diffusion_flux_pyst_kernel(\n diffusion_flux=diffusion_flux[\n :,\n -(ghost_size + 2 * kernel_support) : field.shape[1]\n - (ghost_size - kernel_support),\n ],\n field=field[\n :,\n -(ghost_size + 2 * kernel_support) : field.shape[1]\n - (ghost_size - kernel_support),\n ],\n prefactor=prefactor,\n )\n\n # Set physical domain boundary diffusion flus to zero based on neighboring block\n boundary_width = 1\n if x_previous == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(\n field=diffusion_flux[:, : ghost_size + boundary_width],\n fixed_val=0.0,\n )\n if x_next == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(\n field=diffusion_flux[:, -ghost_size - boundary_width :],\n fixed_val=0.0,\n )\n if y_previous == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(\n field=diffusion_flux[: ghost_size + boundary_width, :],\n fixed_val=0.0,\n )\n if y_next == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(\n field=diffusion_flux[-ghost_size - boundary_width :, :],\n fixed_val=0.0,\n )\n\n return diffusion_flux_pyst_mpi_kernel_2d\n",
"<docstring token>\nfrom sopht.numeric.eulerian_grid_ops.stencil_ops_2d import gen_diffusion_flux_pyst_kernel_2d, gen_set_fixed_val_pyst_kernel_2d\nfrom sopht_mpi.utils.mpi_utils import check_valid_ghost_size_and_kernel_support\nfrom mpi4py import MPI\n\n\ndef gen_diffusion_flux_pyst_mpi_kernel_2d(real_t, mpi_construct,\n ghost_exchange_communicator):\n diffusion_flux_pyst_kernel = gen_diffusion_flux_pyst_kernel_2d(real_t=\n real_t, reset_ghost_zone=False)\n kernel_support = 1\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support = kernel_support\n check_valid_ghost_size_and_kernel_support(ghost_size=\n ghost_exchange_communicator.ghost_size, kernel_support=\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support)\n y_next, x_next = mpi_construct.next_grid_along\n y_previous, x_previous = mpi_construct.previous_grid_along\n set_fixed_val_kernel_2d = gen_set_fixed_val_pyst_kernel_2d(real_t=real_t)\n\n def diffusion_flux_pyst_mpi_kernel_2d(diffusion_flux, field, prefactor):\n diffusion_flux_pyst_mpi_kernel_2d.kernel_support = (\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support)\n ghost_size = ghost_exchange_communicator.ghost_size\n ghost_exchange_communicator.exchange_scalar_field_init(field)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[ghost_size\n :-ghost_size, ghost_size:-ghost_size], field=field[ghost_size:-\n ghost_size, ghost_size:-ghost_size], prefactor=prefactor)\n ghost_exchange_communicator.exchange_finalise()\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[ghost_size -\n kernel_support:ghost_size + 2 * kernel_support, ghost_size:-\n ghost_size], field=field[ghost_size - kernel_support:ghost_size +\n 2 * kernel_support, ghost_size:-ghost_size], prefactor=prefactor)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[-(\n ghost_size + 2 * kernel_support):field.shape[0] - (ghost_size -\n kernel_support), ghost_size:-ghost_size], field=field[-(\n ghost_size + 2 * kernel_support):field.shape[0] - (ghost_size -\n kernel_support), ghost_size:-ghost_size], prefactor=prefactor)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[:, \n ghost_size - kernel_support:ghost_size + 2 * kernel_support],\n field=field[:, ghost_size - kernel_support:ghost_size + 2 *\n kernel_support], prefactor=prefactor)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[:, -(\n ghost_size + 2 * kernel_support):field.shape[1] - (ghost_size -\n kernel_support)], field=field[:, -(ghost_size + 2 *\n kernel_support):field.shape[1] - (ghost_size - kernel_support)],\n prefactor=prefactor)\n boundary_width = 1\n if x_previous == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[:, :ghost_size +\n boundary_width], fixed_val=0.0)\n if x_next == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[:, -ghost_size -\n boundary_width:], fixed_val=0.0)\n if y_previous == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[:ghost_size +\n boundary_width, :], fixed_val=0.0)\n if y_next == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[-ghost_size -\n boundary_width:, :], fixed_val=0.0)\n return diffusion_flux_pyst_mpi_kernel_2d\n",
"<docstring token>\n<import token>\n\n\ndef gen_diffusion_flux_pyst_mpi_kernel_2d(real_t, mpi_construct,\n ghost_exchange_communicator):\n diffusion_flux_pyst_kernel = gen_diffusion_flux_pyst_kernel_2d(real_t=\n real_t, reset_ghost_zone=False)\n kernel_support = 1\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support = kernel_support\n check_valid_ghost_size_and_kernel_support(ghost_size=\n ghost_exchange_communicator.ghost_size, kernel_support=\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support)\n y_next, x_next = mpi_construct.next_grid_along\n y_previous, x_previous = mpi_construct.previous_grid_along\n set_fixed_val_kernel_2d = gen_set_fixed_val_pyst_kernel_2d(real_t=real_t)\n\n def diffusion_flux_pyst_mpi_kernel_2d(diffusion_flux, field, prefactor):\n diffusion_flux_pyst_mpi_kernel_2d.kernel_support = (\n gen_diffusion_flux_pyst_mpi_kernel_2d.kernel_support)\n ghost_size = ghost_exchange_communicator.ghost_size\n ghost_exchange_communicator.exchange_scalar_field_init(field)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[ghost_size\n :-ghost_size, ghost_size:-ghost_size], field=field[ghost_size:-\n ghost_size, ghost_size:-ghost_size], prefactor=prefactor)\n ghost_exchange_communicator.exchange_finalise()\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[ghost_size -\n kernel_support:ghost_size + 2 * kernel_support, ghost_size:-\n ghost_size], field=field[ghost_size - kernel_support:ghost_size +\n 2 * kernel_support, ghost_size:-ghost_size], prefactor=prefactor)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[-(\n ghost_size + 2 * kernel_support):field.shape[0] - (ghost_size -\n kernel_support), ghost_size:-ghost_size], field=field[-(\n ghost_size + 2 * kernel_support):field.shape[0] - (ghost_size -\n kernel_support), ghost_size:-ghost_size], prefactor=prefactor)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[:, \n ghost_size - kernel_support:ghost_size + 2 * kernel_support],\n field=field[:, ghost_size - kernel_support:ghost_size + 2 *\n kernel_support], prefactor=prefactor)\n diffusion_flux_pyst_kernel(diffusion_flux=diffusion_flux[:, -(\n ghost_size + 2 * kernel_support):field.shape[1] - (ghost_size -\n kernel_support)], field=field[:, -(ghost_size + 2 *\n kernel_support):field.shape[1] - (ghost_size - kernel_support)],\n prefactor=prefactor)\n boundary_width = 1\n if x_previous == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[:, :ghost_size +\n boundary_width], fixed_val=0.0)\n if x_next == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[:, -ghost_size -\n boundary_width:], fixed_val=0.0)\n if y_previous == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[:ghost_size +\n boundary_width, :], fixed_val=0.0)\n if y_next == MPI.PROC_NULL:\n set_fixed_val_kernel_2d(field=diffusion_flux[-ghost_size -\n boundary_width:, :], fixed_val=0.0)\n return diffusion_flux_pyst_mpi_kernel_2d\n",
"<docstring token>\n<import token>\n<function token>\n"
] | false |
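The boundary-handling note embedded in the kernel above hinges on the slice arithmetic between ghost_size and kernel_support. A minimal NumPy sketch (not part of the dataset entry; the 12x12 field is invented for illustration) of how those slices line up:

import numpy as np

ghost_size = 3       # ghost layer exchanged over MPI
kernel_support = 1   # stencil half-width of the diffusion kernel
field = np.arange(12 * 12, dtype=float).reshape(12, 12)

# Interior block: every point here has all its stencil neighbours locally,
# so it can be crunched while the ghost exchange is still in flight.
interior = field[ghost_size:-ghost_size, ghost_size:-ghost_size]

# Boundary strip at the start of the Y axis: 3 * kernel_support rows wide,
# starting kernel_support rows inside the ghost zone, so the kernel_support-thick
# physical boundary layer still sees all the neighbours it needs.
y_start_strip = field[ghost_size - kernel_support:ghost_size + 2 * kernel_support,
                      ghost_size:-ghost_size]

print(interior.shape)       # (6, 6)
print(y_start_strip.shape)  # (3, 6)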
855 |
7754974e79202b2df4ab9a7f69948483042a67cc
|
#! /usr/bin/env python
import smtpsend
S = smtpsend.Smtpsent(SUBJECT='Test')
S.sendemail('''
this is a test!
''')
|
[
"#! /usr/bin/env python\n\nimport smtpsend\n\nS = smtpsend.Smtpsent(SUBJECT='Test')\nS.sendemail('''\nthis is a test!\n''')\n",
"import smtpsend\nS = smtpsend.Smtpsent(SUBJECT='Test')\nS.sendemail(\"\"\"\nthis is a test!\n\"\"\")\n",
"<import token>\nS = smtpsend.Smtpsent(SUBJECT='Test')\nS.sendemail(\"\"\"\nthis is a test!\n\"\"\")\n",
"<import token>\n<assignment token>\nS.sendemail(\"\"\"\nthis is a test!\n\"\"\")\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
856 |
987d6c769a4f593405e889ed2b0e3f9955900406
|
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from rest_framework_swagger.views import get_swagger_view
schema_view = get_swagger_view(title='API')
from django.contrib.auth import views as auth_views
urlpatterns = [
path('django-admin/', admin.site.urls),
path('', schema_view),
path('auth/login/', auth_views.LoginView.as_view(template_name='auth/login.html')),
path('auth/logout/', auth_views.LogoutView.as_view()),
path('api/auth/', include('apps.auth.urls')),
path('api/polls/', include('apps.polls.urls')),
]
if settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
path('__debug__/', include(debug_toolbar.urls))
] + urlpatterns
|
[
"from django.contrib import admin\nfrom django.urls import path, include\nfrom django.conf import settings\n\nfrom rest_framework_swagger.views import get_swagger_view\n\nschema_view = get_swagger_view(title='API')\n\nfrom django.contrib.auth import views as auth_views\n\nurlpatterns = [\n path('django-admin/', admin.site.urls),\n path('', schema_view),\n path('auth/login/', auth_views.LoginView.as_view(template_name='auth/login.html')),\n path('auth/logout/', auth_views.LogoutView.as_view()),\n path('api/auth/', include('apps.auth.urls')),\n path('api/polls/', include('apps.polls.urls')),\n]\n\nif settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS:\n import debug_toolbar\n urlpatterns = [\n path('__debug__/', include(debug_toolbar.urls))\n ] + urlpatterns\n",
"from django.contrib import admin\nfrom django.urls import path, include\nfrom django.conf import settings\nfrom rest_framework_swagger.views import get_swagger_view\nschema_view = get_swagger_view(title='API')\nfrom django.contrib.auth import views as auth_views\nurlpatterns = [path('django-admin/', admin.site.urls), path('', schema_view\n ), path('auth/login/', auth_views.LoginView.as_view(template_name=\n 'auth/login.html')), path('auth/logout/', auth_views.LogoutView.as_view\n ()), path('api/auth/', include('apps.auth.urls')), path('api/polls/',\n include('apps.polls.urls'))]\nif settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS:\n import debug_toolbar\n urlpatterns = [path('__debug__/', include(debug_toolbar.urls))\n ] + urlpatterns\n",
"<import token>\nschema_view = get_swagger_view(title='API')\n<import token>\nurlpatterns = [path('django-admin/', admin.site.urls), path('', schema_view\n ), path('auth/login/', auth_views.LoginView.as_view(template_name=\n 'auth/login.html')), path('auth/logout/', auth_views.LogoutView.as_view\n ()), path('api/auth/', include('apps.auth.urls')), path('api/polls/',\n include('apps.polls.urls'))]\nif settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS:\n import debug_toolbar\n urlpatterns = [path('__debug__/', include(debug_toolbar.urls))\n ] + urlpatterns\n",
"<import token>\n<assignment token>\n<import token>\n<assignment token>\nif settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS:\n import debug_toolbar\n urlpatterns = [path('__debug__/', include(debug_toolbar.urls))\n ] + urlpatterns\n",
"<import token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n"
] | false |
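The urlconf above pulls per-app routes in via include('apps.auth.urls') and include('apps.polls.urls'). A hypothetical sketch of what such an included module could look like (the view and route names are invented for illustration and are not taken from the project):

from django.http import JsonResponse
from django.urls import path


def poll_list(request):
    # Placeholder view; a real app would query its models here.
    return JsonResponse({'polls': []})


urlpatterns = [
    path('', poll_list, name='poll-list'),
]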
857 |
b808daf8d1fbe3cc585db57e1049a502d3ca46f5
|
import pandas as pd
from pandas.io.json import json_normalize
import numpy as np
import warnings
import re
warnings.filterwarnings("ignore")
data_path = '/Users/trietnguyen/Documents/Thesis/Thesis-2020/References/Crawler/summaryDataJson.json'
weights = ['mg', 'ml', '%']
def formatName(name):
arr = re.split(' |-', name)
print(arr)
gweight = ''
gname = []
gnumber = ''
for word in arr:
        if any(str.isdigit(c) for c in word): # two cases: "200" and "200mg"
for weight in weights:
pos = word.find(weight)
if pos != -1:
gweight = weight
gnumber = word[:pos]
break
else:
gnumber = word
elif any(word == weight for weight in weights):
gweight = word
elif word != '':
gname.append(word)
return (gnumber, gweight ,' '.join(gname))
def cleanName(name):
return re.sub(r'[^a-z0-9]', '', name.lower())
def rmSpecialCharacters(df):
df['noSpace'] = df['noSpace'].apply(cleanName)
def rmDuplicate(df):
df.drop_duplicates(subset ='noSpace',
keep = 'first', inplace = True)
df.index = range(len(df.index))
def splitMedicine(df):
df_temp = df['name'].apply(formatName)
new_df = pd.DataFrame([[a, b, c] for a,b,c in df_temp.values], columns=['number', 'weight', 'short name'])
return new_df
#Read data
df = pd.read_json(data_path, orient='records')
df.drop_duplicates(subset ="name",
keep = 'first', inplace = True)
df.index = range(len(df.index))
# Drop drugs whose names contain Vietnamese (non-ASCII) characters
nonTiengViet_df = df.loc[df['name'].str.contains(r'[^\x00-\x7F]+') == False]
#print(nonTiengViet_df.head(10))
# Remove duplicates by stripping all whitespace from the drug name; rows with the same name and composition are dropped
nonTiengViet_df['noSpace'] = nonTiengViet_df.name
rm_character = ['-', '\"', '/', ' ', ',', '.']
rmSpecialCharacters(nonTiengViet_df)
rmDuplicate(nonTiengViet_df)
# sort dataframe:
nonTiengViet_df = nonTiengViet_df.sort_values(by=['noSpace'], ascending=True)
nonTiengViet_df.index = range(len(nonTiengViet_df.index))
# Split drug names on [' ', '-']
# Find tokens that contain numbers: 200, 200mg, 0.1mg/ml, 150, ...
#
print(formatName('10mg Dextrose in Water Parenteral Solution for ..'))
splitMedicine(nonTiengViet_df)
new_df = splitMedicine(nonTiengViet_df)
nonTiengViet_df['shortname'] = new_df['short name']
nonTiengViet_df['number'] = new_df['number']
nonTiengViet_df['weight'] = new_df['weight']
nonTiengViet_df['noSpace'] = nonTiengViet_df.shortname
rm_character = ['-', '\"', '/', ' ', ',', '.']
rmSpecialCharacters(nonTiengViet_df)
rmDuplicate(nonTiengViet_df)
print(nonTiengViet_df.describe)
print(nonTiengViet_df.tail(5))
nonTiengViet_df.to_json(r'PreProcessData.json')
|
[
"import pandas as pd\nfrom pandas.io.json import json_normalize\nimport numpy as np\nimport warnings\nimport re\nwarnings.filterwarnings(\"ignore\")\n\ndata_path = '/Users/trietnguyen/Documents/Thesis/Thesis-2020/References/Crawler/summaryDataJson.json'\n\nweights = ['mg', 'ml', '%']\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr) \n gweight = ''\n gname = [] \n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word): #2 trường hợp 200 200mg\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight \n gnumber = word[:pos]\n break\n else:\n gnumber = word\n \n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n\n return (gnumber, gweight ,' '.join(gname))\n\ndef cleanName(name):\n return re.sub(r'[^a-z0-9]', '', name.lower()) \n\ndef rmSpecialCharacters(df):\n df['noSpace'] = df['noSpace'].apply(cleanName)\n \ndef rmDuplicate(df):\n df.drop_duplicates(subset ='noSpace', \n keep = 'first', inplace = True)\n df.index = range(len(df.index))\n\ndef splitMedicine(df):\n df_temp = df['name'].apply(formatName) \n new_df = pd.DataFrame([[a, b, c] for a,b,c in df_temp.values], columns=['number', 'weight', 'short name'])\n return new_df \n\n#Read data\ndf = pd.read_json(data_path, orient='records')\n\ndf.drop_duplicates(subset =\"name\", \n keep = 'first', inplace = True)\ndf.index = range(len(df.index))\n\n#Xoá các thuốc có tiếng việt\nnonTiengViet_df = df.loc[df['name'].str.contains(r'[^\\x00-\\x7F]+') == False]\n#print(nonTiengViet_df.head(10))\n\n#Remove duplicate bằng cách xoá hết các khoảng trắng của tên thuốc, nếu trùng tên và thành phần thì xoá \nnonTiengViet_df['noSpace'] = nonTiengViet_df.name \nrm_character = ['-', '\\\"', '/', ' ', ',', '.']\nrmSpecialCharacters(nonTiengViet_df)\n\nrmDuplicate(nonTiengViet_df)\n\n# sort dataframe:\nnonTiengViet_df = nonTiengViet_df.sort_values(by=['noSpace'], ascending=True)\nnonTiengViet_df.index = range(len(nonTiengViet_df.index))\n# split thuốc theo [' ', '-']\n# Tìm các từ có tồn tại số 200, 200mg, 0.1mg/ml 150 ....\n# \nprint(formatName('10mg Dextrose in Water Parenteral Solution for ..'))\nsplitMedicine(nonTiengViet_df)\n\nnew_df = splitMedicine(nonTiengViet_df)\nnonTiengViet_df['shortname'] = new_df['short name']\nnonTiengViet_df['number'] = new_df['number']\nnonTiengViet_df['weight'] = new_df['weight']\n\n\nnonTiengViet_df['noSpace'] = nonTiengViet_df.shortname \nrm_character = ['-', '\\\"', '/', ' ', ',', '.']\nrmSpecialCharacters(nonTiengViet_df)\n\nrmDuplicate(nonTiengViet_df)\n\nprint(nonTiengViet_df.describe)\nprint(nonTiengViet_df.tail(5))\nnonTiengViet_df.to_json(r'PreProcessData.json')\n\n",
"import pandas as pd\nfrom pandas.io.json import json_normalize\nimport numpy as np\nimport warnings\nimport re\nwarnings.filterwarnings('ignore')\ndata_path = (\n '/Users/trietnguyen/Documents/Thesis/Thesis-2020/References/Crawler/summaryDataJson.json'\n )\nweights = ['mg', 'ml', '%']\n\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr)\n gweight = ''\n gname = []\n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word):\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight\n gnumber = word[:pos]\n break\n else:\n gnumber = word\n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n return gnumber, gweight, ' '.join(gname)\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\ndef rmSpecialCharacters(df):\n df['noSpace'] = df['noSpace'].apply(cleanName)\n\n\ndef rmDuplicate(df):\n df.drop_duplicates(subset='noSpace', keep='first', inplace=True)\n df.index = range(len(df.index))\n\n\ndef splitMedicine(df):\n df_temp = df['name'].apply(formatName)\n new_df = pd.DataFrame([[a, b, c] for a, b, c in df_temp.values],\n columns=['number', 'weight', 'short name'])\n return new_df\n\n\ndf = pd.read_json(data_path, orient='records')\ndf.drop_duplicates(subset='name', keep='first', inplace=True)\ndf.index = range(len(df.index))\nnonTiengViet_df = df.loc[df['name'].str.contains('[^\\\\x00-\\\\x7F]+') == False]\nnonTiengViet_df['noSpace'] = nonTiengViet_df.name\nrm_character = ['-', '\"', '/', ' ', ',', '.']\nrmSpecialCharacters(nonTiengViet_df)\nrmDuplicate(nonTiengViet_df)\nnonTiengViet_df = nonTiengViet_df.sort_values(by=['noSpace'], ascending=True)\nnonTiengViet_df.index = range(len(nonTiengViet_df.index))\nprint(formatName('10mg Dextrose in Water Parenteral Solution for ..'))\nsplitMedicine(nonTiengViet_df)\nnew_df = splitMedicine(nonTiengViet_df)\nnonTiengViet_df['shortname'] = new_df['short name']\nnonTiengViet_df['number'] = new_df['number']\nnonTiengViet_df['weight'] = new_df['weight']\nnonTiengViet_df['noSpace'] = nonTiengViet_df.shortname\nrm_character = ['-', '\"', '/', ' ', ',', '.']\nrmSpecialCharacters(nonTiengViet_df)\nrmDuplicate(nonTiengViet_df)\nprint(nonTiengViet_df.describe)\nprint(nonTiengViet_df.tail(5))\nnonTiengViet_df.to_json('PreProcessData.json')\n",
"<import token>\nwarnings.filterwarnings('ignore')\ndata_path = (\n '/Users/trietnguyen/Documents/Thesis/Thesis-2020/References/Crawler/summaryDataJson.json'\n )\nweights = ['mg', 'ml', '%']\n\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr)\n gweight = ''\n gname = []\n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word):\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight\n gnumber = word[:pos]\n break\n else:\n gnumber = word\n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n return gnumber, gweight, ' '.join(gname)\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\ndef rmSpecialCharacters(df):\n df['noSpace'] = df['noSpace'].apply(cleanName)\n\n\ndef rmDuplicate(df):\n df.drop_duplicates(subset='noSpace', keep='first', inplace=True)\n df.index = range(len(df.index))\n\n\ndef splitMedicine(df):\n df_temp = df['name'].apply(formatName)\n new_df = pd.DataFrame([[a, b, c] for a, b, c in df_temp.values],\n columns=['number', 'weight', 'short name'])\n return new_df\n\n\ndf = pd.read_json(data_path, orient='records')\ndf.drop_duplicates(subset='name', keep='first', inplace=True)\ndf.index = range(len(df.index))\nnonTiengViet_df = df.loc[df['name'].str.contains('[^\\\\x00-\\\\x7F]+') == False]\nnonTiengViet_df['noSpace'] = nonTiengViet_df.name\nrm_character = ['-', '\"', '/', ' ', ',', '.']\nrmSpecialCharacters(nonTiengViet_df)\nrmDuplicate(nonTiengViet_df)\nnonTiengViet_df = nonTiengViet_df.sort_values(by=['noSpace'], ascending=True)\nnonTiengViet_df.index = range(len(nonTiengViet_df.index))\nprint(formatName('10mg Dextrose in Water Parenteral Solution for ..'))\nsplitMedicine(nonTiengViet_df)\nnew_df = splitMedicine(nonTiengViet_df)\nnonTiengViet_df['shortname'] = new_df['short name']\nnonTiengViet_df['number'] = new_df['number']\nnonTiengViet_df['weight'] = new_df['weight']\nnonTiengViet_df['noSpace'] = nonTiengViet_df.shortname\nrm_character = ['-', '\"', '/', ' ', ',', '.']\nrmSpecialCharacters(nonTiengViet_df)\nrmDuplicate(nonTiengViet_df)\nprint(nonTiengViet_df.describe)\nprint(nonTiengViet_df.tail(5))\nnonTiengViet_df.to_json('PreProcessData.json')\n",
"<import token>\nwarnings.filterwarnings('ignore')\n<assignment token>\n\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr)\n gweight = ''\n gname = []\n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word):\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight\n gnumber = word[:pos]\n break\n else:\n gnumber = word\n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n return gnumber, gweight, ' '.join(gname)\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\ndef rmSpecialCharacters(df):\n df['noSpace'] = df['noSpace'].apply(cleanName)\n\n\ndef rmDuplicate(df):\n df.drop_duplicates(subset='noSpace', keep='first', inplace=True)\n df.index = range(len(df.index))\n\n\ndef splitMedicine(df):\n df_temp = df['name'].apply(formatName)\n new_df = pd.DataFrame([[a, b, c] for a, b, c in df_temp.values],\n columns=['number', 'weight', 'short name'])\n return new_df\n\n\n<assignment token>\ndf.drop_duplicates(subset='name', keep='first', inplace=True)\n<assignment token>\nrmSpecialCharacters(nonTiengViet_df)\nrmDuplicate(nonTiengViet_df)\n<assignment token>\nprint(formatName('10mg Dextrose in Water Parenteral Solution for ..'))\nsplitMedicine(nonTiengViet_df)\n<assignment token>\nrmSpecialCharacters(nonTiengViet_df)\nrmDuplicate(nonTiengViet_df)\nprint(nonTiengViet_df.describe)\nprint(nonTiengViet_df.tail(5))\nnonTiengViet_df.to_json('PreProcessData.json')\n",
"<import token>\n<code token>\n<assignment token>\n\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr)\n gweight = ''\n gname = []\n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word):\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight\n gnumber = word[:pos]\n break\n else:\n gnumber = word\n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n return gnumber, gweight, ' '.join(gname)\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\ndef rmSpecialCharacters(df):\n df['noSpace'] = df['noSpace'].apply(cleanName)\n\n\ndef rmDuplicate(df):\n df.drop_duplicates(subset='noSpace', keep='first', inplace=True)\n df.index = range(len(df.index))\n\n\ndef splitMedicine(df):\n df_temp = df['name'].apply(formatName)\n new_df = pd.DataFrame([[a, b, c] for a, b, c in df_temp.values],\n columns=['number', 'weight', 'short name'])\n return new_df\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr)\n gweight = ''\n gname = []\n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word):\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight\n gnumber = word[:pos]\n break\n else:\n gnumber = word\n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n return gnumber, gweight, ' '.join(gname)\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\n<function token>\n\n\ndef rmDuplicate(df):\n df.drop_duplicates(subset='noSpace', keep='first', inplace=True)\n df.index = range(len(df.index))\n\n\ndef splitMedicine(df):\n df_temp = df['name'].apply(formatName)\n new_df = pd.DataFrame([[a, b, c] for a, b, c in df_temp.values],\n columns=['number', 'weight', 'short name'])\n return new_df\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr)\n gweight = ''\n gname = []\n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word):\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight\n gnumber = word[:pos]\n break\n else:\n gnumber = word\n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n return gnumber, gweight, ' '.join(gname)\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\n<function token>\n\n\ndef rmDuplicate(df):\n df.drop_duplicates(subset='noSpace', keep='first', inplace=True)\n df.index = range(len(df.index))\n\n\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n\n\ndef formatName(name):\n arr = re.split(' |-', name)\n print(arr)\n gweight = ''\n gname = []\n gnumber = ''\n for word in arr:\n if any(str.isdigit(c) for c in word):\n for weight in weights:\n pos = word.find(weight)\n if pos != -1:\n gweight = weight\n gnumber = word[:pos]\n break\n else:\n gnumber = word\n elif any(word == weight for weight in weights):\n gweight = word\n elif word != '':\n gname.append(word)\n return gnumber, gweight, ' '.join(gname)\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<function token>\n\n\ndef cleanName(name):\n return re.sub('[^a-z0-9]', '', name.lower())\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
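Given the split/clean helpers in the script above, a small usage sketch (assuming formatName and weights from that script are in scope; the sample drug names are invented) showing the (number, weight, short name) tuples it produces:

samples = [
    '10mg Dextrose in Water Parenteral Solution for ..',
    'Paracetamol 500 mg',
    'Ibuprofen 200',
]
for name in samples:
    number, weight, short_name = formatName(name)
    print(f'{name!r} -> number={number!r}, weight={weight!r}, short name={short_name!r}')
# e.g. the first sample should yield number='10', weight='mg',
# short name='Dextrose in Water Parenteral Solution for ..'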
858 |
a649139a600cb506056a20e00089a07ec9244394
|
# -*- coding: utf-8 -*-
# Copyright 2015 Donne Martin. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import unicode_literals
from __future__ import print_function
import click
from getpass import getpass
import os
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from .compat import configparser
from .lib.github3 import authorize, enterprise_login, login
from .lib.github3.exceptions import AuthenticationFailed, UnprocessableEntity
class Config(object):
"""Gitsome config.
:type api: :class:`github3.github.Github`
:param api: An instance of github3.github.Github.
:type clr_x: str
:param clr_x: Various ansi color config colors to use for highlights.
:type CONFIG: str
:param CONFIG: The config file name.
:type CONFIG_SECTION: str
:param CONFIG_SECTION: The main config file section label.
:type CONFIG_CLR_X: str
:param CONFIG_CLR_X: Various ansi color config labels to use for highlights.
:type CONFIG_ENTERPRISE_URL: str
:param CONFIG_ENTERPRISE_URL: The GitHub Enterprise url.
:type CONFIG_USER_LOGIN: str
:param CONFIG_USER_LOGIN: The user login.
:type CONFIG_USER_PASS: str
:param CONFIG_USER_PASS: The user password.
:type CONFIG_USER_TOKEN: str
:param CONFIG_USER_TOKEN: The user token.
:type CONFIG_USER_FEED: str
:param CONFIG_USER_FEED: The user feed config. This is the feed on
https://github.com/ when logged in and requires the basic auth model,
which doesn't work when logging in with tokens or 2FA. This config
        lists the pre-signed url to access the feed.
:type CONFIG_URL: str
:param CONFIG_URL: The config file name that contains urls used in the
`gh view` command.
:type CONFIG_URL_SECTION: str
:param CONFIG_URL_SECTION: The config file section that contains urls used
in the `gh view [url_index]` command.
:type CONFIG_URL_LIST: str
:param CONFIG_URL_LIST: The config containing a list of the last set of
urls the user has seen, which allows the user to quickly access a repo
url with the `gh view [url_index]` command.
:type CONFIG_VERIFY_SSL: str
:param CONFIG_VERIFY_SSL: Determines whether to verify SSL certs.
:type enterprise_url: str
:param enterprise_url: The GitHub Enterprise url.
:type urls: list
:param urls: The last set of urls the user has seen, which allows the user
to quickly access a repo url with the gh view [url_index] command.
:type user_login: str
:param user_login: The user's login in ~/.gitsomeconfig.
:type user_pass: str
:param user_pass: The user's pass in ~/.gitsomeconfig.
This is only stored for GitHub Enterprise users since using only a
personal access token does not seem to be supported.
:type user_token: str
:param user_token: The user's token in ~/.gitsomeconfig.
:type verify_ssl: bool
:param verify_ssl: Determines whether to verify SSL certs.
"""
CONFIG = '.gitsomeconfig'
CONFIG_CLR_PRIMARY = 'clr_primary'
CONFIG_CLR_SECONDARY = 'clr_secondary'
CONFIG_CLR_TERTIARY = 'clr_tertiary'
CONFIG_CLR_QUATERNARY = 'clr_quaternary'
CONFIG_CLR_BOLD = 'clr_bold'
CONFIG_CLR_CODE = 'clr_code'
CONFIG_CLR_ERROR = 'clr_error'
CONFIG_CLR_HEADER = 'clr_header'
CONFIG_CLR_LINK = 'clr_link'
CONFIG_CLR_LIST = 'clr_list'
CONFIG_CLR_MESSAGE = 'clr_message'
CONFIG_CLR_NUM_COMMENTS = 'clr_num_comments'
CONFIG_CLR_NUM_POINTS = 'clr_num_points'
CONFIG_CLR_TAG = 'clr_tag'
CONFIG_CLR_TIME = 'clr_time'
CONFIG_CLR_TITLE = 'clr_title'
CONFIG_CLR_TOOLTIP = 'clr_tooltip'
CONFIG_CLR_USER = 'clr_user'
CONFIG_CLR_VIEW_LINK = 'clr_view_link'
CONFIG_CLR_VIEW_INDEX = 'clr_view_index'
CONFIG_SECTION = 'github'
CONFIG_USER_LOGIN = 'user_login'
CONFIG_USER_PASS = 'user_pass'
CONFIG_USER_TOKEN = 'user_token'
CONFIG_USER_FEED = 'user_feed'
CONFIG_ENTERPRISE_URL = 'enterprise_url'
CONFIG_VERIFY_SSL = 'verify_ssl'
CONFIG_URL = '.gitsomeconfigurl'
CONFIG_URL_SECTION = 'url'
CONFIG_URL_LIST = 'url_list'
CONFIG_AVATAR = '.gitsomeconfigavatar.png'
def __init__(self):
self.api = None
self.user_login = None
self.user_pass = None
self.user_token = None
self.user_feed = None
self.enterprise_url = None
self.verify_ssl = True
self.urls = []
self._init_colors()
self.load_configs([
self.load_config_colors,
])
self.login = login
self.authorize = authorize
self.getpass = getpass
def _init_colors(self):
"""Initialize colors to their defaults."""
self.clr_primary = None
self.clr_secondary = 'green'
self.clr_tertiary = 'cyan'
self.clr_quaternary = 'yellow'
self.clr_bold = 'cyan'
self.clr_code = 'cyan'
self.clr_error = 'red'
self.clr_header = 'yellow'
self.clr_link = 'green'
self.clr_list = 'cyan'
self.clr_message = None
self.clr_num_comments = 'green'
self.clr_num_points = 'green'
self.clr_tag = 'cyan'
self.clr_time = 'yellow'
self.clr_title = None
self.clr_tooltip = None
self.clr_user = 'cyan'
self.clr_view_link = 'magenta'
self.clr_view_index = 'magenta'
def authenticate_cached_credentials(self, config, parser,
enterprise_auth=enterprise_login):
"""Authenticate with the user's credentials in ~/.gitsomeconfig.
:type config: str
:param config: The config path.
:type parser: :class:`ConfigParser.RawConfigParser`
        :param parser: An instance of `ConfigParser.RawConfigParser`.
"""
with open(config) as config_file:
try:
parser.read_file(config_file)
except AttributeError:
parser.readfp(config_file)
self.user_login = self.load_config(
parser=parser,
cfg_label=self.CONFIG_USER_LOGIN)
self.user_pass = self.load_config(
parser=parser,
cfg_label=self.CONFIG_USER_PASS)
self.user_token = self.load_config(
parser=parser,
cfg_label=self.CONFIG_USER_TOKEN)
self.enterprise_url = self.load_config(
parser=parser,
cfg_label=self.CONFIG_ENTERPRISE_URL)
self.verify_ssl = self.load_config(
parser=parser,
cfg_label=self.CONFIG_VERIFY_SSL,
boolean_config=True)
self.user_feed = self.load_config(
parser=parser,
cfg_label=self.CONFIG_USER_FEED)
if not self.verify_ssl:
# The user has chosen not to verify SSL certs.
# Disable warnings related to this option.
requests.packages.urllib3.disable_warnings(
InsecureRequestWarning)
login_kwargs = {
'username': self.user_login,
'two_factor_callback': self.request_two_factor_code,
}
if self.enterprise_url is not None:
self.login = enterprise_auth
login_kwargs.update({
'url': self.enterprise_url,
'verify': self.verify_ssl,
})
if self.user_token is not None:
login_kwargs.update({'token': self.user_token})
elif self.user_pass is not None:
login_kwargs.update({'password': self.user_pass})
else:
self.print_auth_error()
return
else:
login_kwargs.update({'token': self.user_token})
self.api = self.login(**login_kwargs)
def authenticate(self, enterprise=False,
enterprise_auth=enterprise_login, overwrite=False):
"""Log into GitHub.
Adapted from https://github.com/sigmavirus24/github-cli.
:type enterprise: bool
:param enterprise: Determines whether to configure GitHub Enterprise.
Default: False.
:type overwrite: bool
        :param overwrite: Indicates whether we want to overwrite the current
set of credentials. Default: False.
"""
if self.api is not None and not overwrite:
return
# Get the full path to the configuration file.
config = self.get_github_config_path(self.CONFIG)
parser = configparser.RawConfigParser()
# Check to make sure the file exists and we are allowed to read it.
# Skip if we want to overwrite the auth settings.
if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK) and \
not overwrite:
with open(config) as config_file:
try:
parser.read_file(config_file)
except AttributeError:
parser.readfp(config_file)
self.authenticate_cached_credentials(config, parser)
else:
# The file didn't exist or we don't have the correct permissions.
login_kwargs = {
'two_factor_callback': self.request_two_factor_code,
}
if enterprise:
self.login = enterprise_auth
while not self.enterprise_url:
self.enterprise_url = input('Enterprise URL: ')
if click.confirm('Do you want to verify SSL certs?',
default=True):
self.verify_ssl = True
else:
self.verify_ssl = False
login_kwargs.update({
'url': self.enterprise_url,
'verify': self.verify_ssl,
})
while not self.user_login:
self.user_login = input('User Login: ')
login_kwargs.update({'username': self.user_login})
if click.confirm(('Do you want to log in with a password [Y] or '
'a personal access token [n]?'),
default=True):
user_pass = None
while not user_pass:
user_pass = self.getpass('Password: ')
login_kwargs.update({'password': user_pass})
try:
if not enterprise:
# Trade the user password for a personal access token.
# This does not seem to be available for Enterprise.
auth = self.authorize(
self.user_login,
user_pass,
scopes=['user', 'repo'],
note='gitsome',
note_url='https://github.com/donnemartin/gitsome',
two_factor_callback=self.request_two_factor_code
)
self.user_token = auth.token
else:
self.user_pass = user_pass
except (UnprocessableEntity, AuthenticationFailed):
click.secho('Error creating token.',
fg=self.clr_error)
click.secho(('Visit the following page and verify you do '
'not have an existing token named "gitsome":\n'
' https://github.com/settings/tokens\n'
'If a token already exists, update your '
'~/.gitsomeconfig file with your token:\n'
' user_token = TOKEN\n'
'You can also generate a new token.'),
fg=self.clr_message)
self.print_auth_error()
return
else:
# The user has chosen to authenticate with a token.
while not self.user_token:
self.user_token = input('Token: ')
login_kwargs.update({'token': self.user_token})
self.api = self.login(**login_kwargs)
if self.user_feed:
parser.set(self.CONFIG_SECTION,
self.CONFIG_USER_FEED,
self.user_feed)
def check_auth(self):
"""Check if the current authorization is valid.
This method uses the ratelimit_remaining api to check whether
the currently authenticated user's credentials are valid without
deducting from the rate limit. The ratelimit_remaining api does not
seem to be available for GitHub Enterprise.
github3.py's method check_authorization seems to only work given
an authorization created by a registered application.
TODO: Determine a better way to check the authorization for
GitHub Enterprise.
        """
if self.enterprise_url is not None:
return True
try:
if self.api is not None:
# Throws AuthenticationFailed if invalid credentials but
# does not deduct from the rate limit.
self.api.ratelimit_remaining
return True
else:
self.print_auth_error()
except AuthenticationFailed:
self.print_auth_error()
return False
def get_github_config_path(self, config_file_name):
"""Attempt to find the github config file.
Adapted from https://github.com/sigmavirus24/github-cli.
:type config_file_name: str
:param config_file_name: The config file name.
:rtype: str
:return: The github config file path.
"""
home = os.path.abspath(os.environ.get('HOME', ''))
config_file_path = os.path.join(home, config_file_name)
return config_file_path
def load_config(self, parser, cfg_label, default=None,
color_config=False, boolean_config=False):
"""Load the specified config from ~/.gitsomeconfig.
:type parser: :class:`ConfigParser.RawConfigParser`
:param parser: An instance of `ConfigParser.RawConfigParser`.
:type cfg_label: str
:param cfg_label: The config label to load.
:type default: str
:param default: The default color if no color config exists.
Default: None.
:type color_config: bool
:param color_config: Determines whether this is a color config.
Default: False.
:type boolean_config: bool
:param boolean_config: Determines whether to load a boolean config.
Default: False.
"""
try:
if boolean_config:
cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)
else:
cfg = parser.get(self.CONFIG_SECTION, cfg_label)
if color_config:
if cfg == 'none':
cfg = None
# Check if the user input a valid color.
# If invalid, this will throw a TypeError
click.style('', fg=cfg)
except (TypeError, configparser.NoOptionError):
return default
return cfg
def load_configs(self, config_funcs):
"""Load the specified config from ~/.gitsomeconfig.
        :type config_funcs: list
        :param config_funcs: The config methods to run.
"""
config_file_path = self.get_github_config_path(self.CONFIG)
parser = configparser.RawConfigParser()
try:
with open(config_file_path) as config_file:
try:
parser.read_file(config_file)
except AttributeError:
parser.readfp(config_file)
for config_func in config_funcs:
config_func(parser)
except IOError:
# There might not be a cache yet, just silently return.
return None
def load_config_colors(self, parser):
"""Load the color config from ~/.gitsomeconfig.
:type parser: :class:`ConfigParser.RawConfigParser`
:param parser: An instance of `ConfigParser.RawConfigParser`.
"""
self.load_colors(parser)
def load_colors(self, parser):
"""Load all colors from ~/.gitsomeconfig.
:type parser: :class:`ConfigParser.RawConfigParser`
:param parser: An instance of `ConfigParser.RawConfigParser`.
"""
self.clr_primary = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_PRIMARY,
default=self.clr_primary,
color_config=True)
self.clr_secondary = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_SECONDARY,
default=self.clr_secondary,
color_config=True)
self.clr_tertiary = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_TERTIARY,
default=self.clr_tertiary,
color_config=True)
self.clr_quaternary = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_QUATERNARY,
default=self.clr_quaternary,
color_config=True)
self.clr_bold = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_BOLD,
default=self.clr_bold,
color_config=True)
self.clr_code = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_CODE,
default=self.clr_code,
color_config=True)
        self.clr_error = self.load_config(
            parser=parser,
            cfg_label=self.CONFIG_CLR_ERROR,
            default=self.clr_error,
            color_config=True)
self.clr_header = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_HEADER,
default=self.clr_header,
color_config=True)
self.clr_link = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_LINK,
default=self.clr_link,
color_config=True)
self.clr_list = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_LIST,
default=self.clr_list,
color_config=True)
self.clr_message = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_MESSAGE,
default=self.clr_message,
color_config=True)
self.clr_num_comments = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_NUM_COMMENTS,
default=self.clr_num_comments,
color_config=True)
self.clr_num_points = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_NUM_POINTS,
default=self.clr_num_points,
color_config=True)
self.clr_tag = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_TAG,
default=self.clr_tag,
color_config=True)
self.clr_time = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_TIME,
default=self.clr_time,
color_config=True)
self.clr_title = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_TITLE,
default=self.clr_title,
color_config=True)
self.clr_tooltip = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_TOOLTIP,
default=self.clr_tooltip,
color_config=True)
self.clr_user = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_USER,
default=self.clr_user,
color_config=True)
self.clr_view_link = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_VIEW_LINK,
default=self.clr_view_link,
color_config=True)
self.clr_view_index = self.load_config(
parser=parser,
cfg_label=self.CONFIG_CLR_VIEW_INDEX,
default=self.clr_view_index,
color_config=True)
def load_urls(self, view_in_browser):
"""Load the current set of urls from ~/.gitsomeconfigurl.
:type view_in_browser: bool
:param view_in_browser: Determines whether to view the urls in a
browser.
:rtype: list
:return: Collection of urls.
"""
config = self.get_github_config_path(self.CONFIG_URL)
parser = configparser.RawConfigParser()
with open(config) as config_file:
try:
parser.read_file(config_file)
except AttributeError:
parser.readfp(config_file)
urls = parser.get(self.CONFIG_URL_SECTION,
self.CONFIG_URL_LIST)
urls = urls.strip()
excludes = ['[', ']', "'"]
for exclude in excludes:
urls = urls.replace(exclude, '')
if not view_in_browser:
urls = urls.replace('https://github.com/', '')
return urls.split(', ')
def print_auth_error(self):
"""Print a message the authorization has failed."""
click.secho('Authentication error.', fg=self.clr_error)
click.secho(('Update your credentials in ~/.gitsomeconfig '
'or run:\n gh configure'),
fg=self.clr_message)
def prompt_news_feed(self):
"""Prompt the user to enter a news feed url."""
if click.confirm(('No feed url detected.\n Calling gh events without '
"an argument\n displays the logged in user's "
'news feed.\nDo you want gitsome to track your '
'news feed?'),
default=True):
click.secho(('Visit the following url while logged into GitHub:\n'
' https://github.com\n'
'Enter the url found under "Subscribe to your '
'news feed".'),
fg=self.clr_message)
self.user_feed = ''
while not self.user_feed:
self.user_feed = input('URL: ')
def request_two_factor_code(self):
"""Request two factor authentication code.
Callback if two factor authentication is requested.
:rtype: str
:return: The user input two factor authentication code.
"""
code = ''
while not code:
code = input('Enter 2FA code: ')
return code
def save_config(self):
"""Saves the config to ~/.gitsomeconfig."""
if self.check_auth():
config = self.get_github_config_path(self.CONFIG)
parser = configparser.RawConfigParser()
parser.add_section(self.CONFIG_SECTION)
parser.set(self.CONFIG_SECTION,
self.CONFIG_USER_LOGIN,
self.user_login)
if self.user_token is not None:
parser.set(self.CONFIG_SECTION,
self.CONFIG_USER_TOKEN,
self.user_token)
if self.user_feed is not None:
parser.set(self.CONFIG_SECTION,
self.CONFIG_USER_FEED,
self.user_feed)
if self.enterprise_url is not None:
parser.set(self.CONFIG_SECTION,
self.CONFIG_ENTERPRISE_URL,
self.enterprise_url)
if self.user_pass is not None:
parser.set(self.CONFIG_SECTION,
self.CONFIG_USER_PASS,
self.user_pass)
else:
parser.remove_option(self.CONFIG_SECTION,
self.CONFIG_USER_PASS)
parser.set(self.CONFIG_SECTION,
self.CONFIG_VERIFY_SSL,
self.verify_ssl)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_PRIMARY,
self.clr_primary)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_SECONDARY,
self.clr_secondary)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_TERTIARY,
self.clr_tertiary)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_QUATERNARY,
self.clr_quaternary)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_BOLD,
self.clr_bold)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_CODE,
self.clr_code)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_ERROR,
self.clr_error)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_HEADER,
self.clr_header)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_LINK,
self.clr_link)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_LIST,
self.clr_list)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_MESSAGE,
self.clr_message)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_NUM_COMMENTS,
self.clr_num_comments)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_NUM_POINTS,
self.clr_num_points)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_TAG,
self.clr_tag)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_TIME,
self.clr_time)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_TITLE,
self.clr_title)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_TOOLTIP,
self.clr_tooltip)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_USER,
self.clr_user)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_VIEW_LINK,
self.clr_view_link)
parser.set(self.CONFIG_SECTION,
self.CONFIG_CLR_VIEW_INDEX,
self.clr_view_index)
with open(config, 'w+') as config_file:
parser.write(config_file)
def save_urls(self):
"""Save the current set of urls to ~/.gitsomeconfigurl."""
config = self.get_github_config_path(self.CONFIG_URL)
parser = configparser.RawConfigParser()
try:
parser.add_section(self.CONFIG_URL_SECTION)
except configparser.DuplicateSectionError:
pass
parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)
with open(config, 'w+') as config_file:
parser.write(config_file)
def show_bash_completions_info(self):
"""Show info on how to enable bash completions"""
click.secho(('By default, gitsome looks at the following locations '
'to enable bash completions:\n'
' https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\n' # NOQA
'If bash completions are not working for you, check out '
'the following link:\n'
' https://github.com/donnemartin/gitsome#enabling-bash-completions'), # NOQA
fg=self.clr_message)
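
# The Config class above leans on configparser.RawConfigParser for everything it
# persists. Below is a standalone sketch (not gitsome code; the section name and
# sample values are invented) of the same load pattern: booleans go through
# getboolean, the literal string 'none' means "no colour", and a missing option
# falls back to a supplied default.
import configparser

_parser = configparser.RawConfigParser()
_parser.add_section('github')
_parser.set('github', 'user_login', 'octocat')   # invented sample value
_parser.set('github', 'verify_ssl', 'False')
_parser.set('github', 'clr_primary', 'none')


def _load(parser, label, default=None, boolean=False):
    try:
        if boolean:
            return parser.getboolean('github', label)
        value = parser.get('github', label)
        return None if value == 'none' else value
    except configparser.NoOptionError:
        return default


print(_load(_parser, 'user_login'))                    # 'octocat'
print(_load(_parser, 'verify_ssl', boolean=True))      # False
print(_load(_parser, 'clr_primary', default='cyan'))   # None ('none' -> no colour)
print(_load(_parser, 'user_token', default='absent'))  # 'absent' (missing option)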
|
[
"# -*- coding: utf-8 -*-\n\n# Copyright 2015 Donne Martin. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\n\nimport click\nfrom getpass import getpass\nimport os\nimport requests\nfrom requests.packages.urllib3.exceptions import InsecureRequestWarning\n\nfrom .compat import configparser\nfrom .lib.github3 import authorize, enterprise_login, login\nfrom .lib.github3.exceptions import AuthenticationFailed, UnprocessableEntity\n\n\nclass Config(object):\n \"\"\"Gitsome config.\n\n :type api: :class:`github3.github.Github`\n :param api: An instance of github3.github.Github.\n\n :type clr_x: str\n :param clr_x: Various ansi color config colors to use for highlights.\n\n :type CONFIG: str\n :param CONFIG: The config file name.\n\n :type CONFIG_SECTION: str\n :param CONFIG_SECTION: The main config file section label.\n\n :type CONFIG_CLR_X: str\n :param CONFIG_CLR_X: Various ansi color config labels to use for highlights.\n\n :type CONFIG_ENTERPRISE_URL: str\n :param CONFIG_ENTERPRISE_URL: The GitHub Enterprise url.\n\n :type CONFIG_USER_LOGIN: str\n :param CONFIG_USER_LOGIN: The user login.\n\n :type CONFIG_USER_PASS: str\n :param CONFIG_USER_PASS: The user password.\n\n :type CONFIG_USER_TOKEN: str\n :param CONFIG_USER_TOKEN: The user token.\n\n :type CONFIG_USER_FEED: str\n :param CONFIG_USER_FEED: The user feed config. This is the feed on\n https://github.com/ when logged in and requires the basic auth model,\n which doesn't work when logging in with tokens or 2FA. 
This config\n listed the pre-signed url to access the feed.\n\n :type CONFIG_URL: str\n :param CONFIG_URL: The config file name that contains urls used in the\n `gh view` command.\n\n :type CONFIG_URL_SECTION: str\n :param CONFIG_URL_SECTION: The config file section that contains urls used\n in the `gh view [url_index]` command.\n\n :type CONFIG_URL_LIST: str\n :param CONFIG_URL_LIST: The config containing a list of the last set of\n urls the user has seen, which allows the user to quickly access a repo\n url with the `gh view [url_index]` command.\n\n :type CONFIG_VERIFY_SSL: str\n :param CONFIG_VERIFY_SSL: Determines whether to verify SSL certs.\n\n :type enterprise_url: str\n :param enterprise_url: The GitHub Enterprise url.\n\n :type urls: list\n :param urls: The last set of urls the user has seen, which allows the user\n to quickly access a repo url with the gh view [url_index] command.\n\n :type user_login: str\n :param user_login: The user's login in ~/.gitsomeconfig.\n\n :type user_pass: str\n :param user_pass: The user's pass in ~/.gitsomeconfig.\n This is only stored for GitHub Enterprise users since using only a\n personal access token does not seem to be supported.\n\n :type user_token: str\n :param user_token: The user's token in ~/.gitsomeconfig.\n\n :type verify_ssl: bool\n :param verify_ssl: Determines whether to verify SSL certs.\n \"\"\"\n\n CONFIG = '.gitsomeconfig'\n CONFIG_CLR_PRIMARY = 'clr_primary'\n CONFIG_CLR_SECONDARY = 'clr_secondary'\n CONFIG_CLR_TERTIARY = 'clr_tertiary'\n CONFIG_CLR_QUATERNARY = 'clr_quaternary'\n CONFIG_CLR_BOLD = 'clr_bold'\n CONFIG_CLR_CODE = 'clr_code'\n CONFIG_CLR_ERROR = 'clr_error'\n CONFIG_CLR_HEADER = 'clr_header'\n CONFIG_CLR_LINK = 'clr_link'\n CONFIG_CLR_LIST = 'clr_list'\n CONFIG_CLR_MESSAGE = 'clr_message'\n CONFIG_CLR_NUM_COMMENTS = 'clr_num_comments'\n CONFIG_CLR_NUM_POINTS = 'clr_num_points'\n CONFIG_CLR_TAG = 'clr_tag'\n CONFIG_CLR_TIME = 'clr_time'\n CONFIG_CLR_TITLE = 'clr_title'\n CONFIG_CLR_TOOLTIP = 'clr_tooltip'\n CONFIG_CLR_USER = 'clr_user'\n CONFIG_CLR_VIEW_LINK = 'clr_view_link'\n CONFIG_CLR_VIEW_INDEX = 'clr_view_index'\n CONFIG_SECTION = 'github'\n CONFIG_USER_LOGIN = 'user_login'\n CONFIG_USER_PASS = 'user_pass'\n CONFIG_USER_TOKEN = 'user_token'\n CONFIG_USER_FEED = 'user_feed'\n CONFIG_ENTERPRISE_URL = 'enterprise_url'\n CONFIG_VERIFY_SSL = 'verify_ssl'\n CONFIG_URL = '.gitsomeconfigurl'\n CONFIG_URL_SECTION = 'url'\n CONFIG_URL_LIST = 'url_list'\n CONFIG_AVATAR = '.gitsomeconfigavatar.png'\n\n def __init__(self):\n self.api = None\n self.user_login = None\n self.user_pass = None\n self.user_token = None\n self.user_feed = None\n self.enterprise_url = None\n self.verify_ssl = True\n self.urls = []\n self._init_colors()\n self.load_configs([\n self.load_config_colors,\n ])\n self.login = login\n self.authorize = authorize\n self.getpass = getpass\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def 
authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_USER_PASS)\n self.user_token = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_VERIFY_SSL,\n boolean_config=True)\n self.user_feed = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_USER_FEED)\n if not self.verify_ssl:\n # The user has chosen not to verify SSL certs.\n # Disable warnings related to this option.\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {\n 'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code,\n }\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({\n 'url': self.enterprise_url,\n 'verify': self.verify_ssl,\n })\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False,\n enterprise_auth=enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n # Get the full path to the configuration file.\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n # Check to make sure the file exists and we are allowed to read it.\n # Skip if we want to overwrite the auth settings.\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK) and \\\n not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n # The file didn't exist or we don't have the correct permissions.\n login_kwargs = {\n 'two_factor_callback': self.request_two_factor_code,\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({\n 'url': self.enterprise_url,\n 'verify': self.verify_ssl,\n })\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(('Do you want to log in with a password [Y] or '\n 'a personal access token [n]?'),\n default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n # Trade the user password for a personal access token.\n # This does not seem to be available for Enterprise.\n auth = self.authorize(\n self.user_login,\n user_pass,\n scopes=['user', 'repo'],\n note='gitsome',\n note_url='https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code\n )\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.',\n fg=self.clr_error)\n click.secho(('Visit the following page and verify you do '\n 'not have an existing token named \"gitsome\":\\n'\n ' https://github.com/settings/tokens\\n'\n 'If a token already exists, update your '\n '~/.gitsomeconfig file with your token:\\n'\n ' user_token = TOKEN\\n'\n 'You can also generate a new token.'),\n fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n # The user has chosen to authenticate with a token.\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_USER_FEED,\n self.user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n # Throws AuthenticationFailed if invalid credentials but\n # does not deduct from the rate limit.\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n\n def load_config(self, parser, cfg_label, default=None,\n color_config=False, boolean_config=False):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n\n :type cfg_label: str\n :param cfg_label: The config label to load.\n\n :type default: str\n :param default: The default color if no color config exists.\n Default: None.\n\n :type color_config: bool\n :param color_config: Determines whether this is a color config.\n Default: False.\n\n :type boolean_config: bool\n :param boolean_config: Determines whether to load a boolean config.\n Default: False.\n \"\"\"\n try:\n if boolean_config:\n cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)\n else:\n cfg = parser.get(self.CONFIG_SECTION, cfg_label)\n if color_config:\n if cfg == 'none':\n cfg = None\n # Check if the user input a valid color.\n # If invalid, this will throw a TypeError\n click.style('', fg=cfg)\n except (TypeError, configparser.NoOptionError):\n return default\n return cfg\n\n def load_configs(self, config_funcs):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type foo: list\n :param foo: The config methods to run.\n \"\"\"\n config_file_path = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n try:\n with open(config_file_path) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n for config_func in config_funcs:\n config_func(parser)\n except IOError:\n # There might not be a cache yet, just silently return.\n return None\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_PRIMARY,\n default=self.clr_primary,\n color_config=True)\n self.clr_secondary = self.load_config(\n parser=parser,\n 
cfg_label=self.CONFIG_CLR_SECONDARY,\n default=self.clr_secondary,\n color_config=True)\n self.clr_tertiary = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_TERTIARY,\n default=self.clr_tertiary,\n color_config=True)\n self.clr_quaternary = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_QUATERNARY,\n default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_BOLD,\n default=self.clr_bold,\n color_config=True)\n self.clr_code = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_CODE,\n default=self.clr_code,\n color_config=True)\n self.clr_code = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_ERROR,\n default=self.clr_code,\n color_config=True)\n self.clr_header = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_HEADER,\n default=self.clr_header,\n color_config=True)\n self.clr_link = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_LINK,\n default=self.clr_link,\n color_config=True)\n self.clr_list = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_LIST,\n default=self.clr_list,\n color_config=True)\n self.clr_message = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_MESSAGE,\n default=self.clr_message,\n color_config=True)\n self.clr_num_comments = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_NUM_COMMENTS,\n default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_NUM_POINTS,\n default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_TAG,\n default=self.clr_tag,\n color_config=True)\n self.clr_time = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_TIME,\n default=self.clr_time,\n color_config=True)\n self.clr_title = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_TITLE,\n default=self.clr_title,\n color_config=True)\n self.clr_tooltip = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_TOOLTIP,\n default=self.clr_tooltip,\n color_config=True)\n self.clr_user = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_USER,\n default=self.clr_user,\n color_config=True)\n self.clr_view_link = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_VIEW_LINK,\n default=self.clr_view_link,\n color_config=True)\n self.clr_view_index = self.load_config(\n parser=parser,\n cfg_label=self.CONFIG_CLR_VIEW_INDEX,\n default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION,\n self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n 
click.secho(('Update your credentials in ~/.gitsomeconfig '\n 'or run:\\n gh configure'),\n fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(('No feed url detected.\\n Calling gh events without '\n \"an argument\\n displays the logged in user's \"\n 'news feed.\\nDo you want gitsome to track your '\n 'news feed?'),\n default=True):\n click.secho(('Visit the following url while logged into GitHub:\\n'\n ' https://github.com\\n'\n 'Enter the url found under \"Subscribe to your '\n 'news feed\".'),\n fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_USER_LOGIN,\n self.user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_USER_FEED,\n self.user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION,\n self.CONFIG_USER_PASS)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_VERIFY_SSL,\n self.verify_ssl)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_PRIMARY,\n self.clr_primary)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_SECONDARY,\n self.clr_secondary)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_TERTIARY,\n self.clr_tertiary)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_BOLD,\n self.clr_bold)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_CODE,\n self.clr_code)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_ERROR,\n self.clr_error)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_HEADER,\n self.clr_header)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_LINK,\n self.clr_link)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_LIST,\n self.clr_list)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_MESSAGE,\n self.clr_message)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_TAG,\n self.clr_tag)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_TIME,\n self.clr_time)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_TITLE,\n self.clr_title)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_TOOLTIP,\n self.clr_tooltip)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_USER,\n self.clr_user)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_VIEW_LINK,\n self.clr_view_link)\n parser.set(self.CONFIG_SECTION,\n self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') 
as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(('By default, gitsome looks at the following locations '\n 'to enable bash completions:\\n'\n ' https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\\n' # NOQA\n 'If bash completions are not working for you, check out '\n 'the following link:\\n'\n ' https://github.com/donnemartin/gitsome#enabling-bash-completions'), # NOQA\n fg=self.clr_message)\n",
"from __future__ import unicode_literals\nfrom __future__ import print_function\nimport click\nfrom getpass import getpass\nimport os\nimport requests\nfrom requests.packages.urllib3.exceptions import InsecureRequestWarning\nfrom .compat import configparser\nfrom .lib.github3 import authorize, enterprise_login, login\nfrom .lib.github3.exceptions import AuthenticationFailed, UnprocessableEntity\n\n\nclass Config(object):\n \"\"\"Gitsome config.\n\n :type api: :class:`github3.github.Github`\n :param api: An instance of github3.github.Github.\n\n :type clr_x: str\n :param clr_x: Various ansi color config colors to use for highlights.\n\n :type CONFIG: str\n :param CONFIG: The config file name.\n\n :type CONFIG_SECTION: str\n :param CONFIG_SECTION: The main config file section label.\n\n :type CONFIG_CLR_X: str\n :param CONFIG_CLR_X: Various ansi color config labels to use for highlights.\n\n :type CONFIG_ENTERPRISE_URL: str\n :param CONFIG_ENTERPRISE_URL: The GitHub Enterprise url.\n\n :type CONFIG_USER_LOGIN: str\n :param CONFIG_USER_LOGIN: The user login.\n\n :type CONFIG_USER_PASS: str\n :param CONFIG_USER_PASS: The user password.\n\n :type CONFIG_USER_TOKEN: str\n :param CONFIG_USER_TOKEN: The user token.\n\n :type CONFIG_USER_FEED: str\n :param CONFIG_USER_FEED: The user feed config. This is the feed on\n https://github.com/ when logged in and requires the basic auth model,\n which doesn't work when logging in with tokens or 2FA. This config\n listed the pre-signed url to access the feed.\n\n :type CONFIG_URL: str\n :param CONFIG_URL: The config file name that contains urls used in the\n `gh view` command.\n\n :type CONFIG_URL_SECTION: str\n :param CONFIG_URL_SECTION: The config file section that contains urls used\n in the `gh view [url_index]` command.\n\n :type CONFIG_URL_LIST: str\n :param CONFIG_URL_LIST: The config containing a list of the last set of\n urls the user has seen, which allows the user to quickly access a repo\n url with the `gh view [url_index]` command.\n\n :type CONFIG_VERIFY_SSL: str\n :param CONFIG_VERIFY_SSL: Determines whether to verify SSL certs.\n\n :type enterprise_url: str\n :param enterprise_url: The GitHub Enterprise url.\n\n :type urls: list\n :param urls: The last set of urls the user has seen, which allows the user\n to quickly access a repo url with the gh view [url_index] command.\n\n :type user_login: str\n :param user_login: The user's login in ~/.gitsomeconfig.\n\n :type user_pass: str\n :param user_pass: The user's pass in ~/.gitsomeconfig.\n This is only stored for GitHub Enterprise users since using only a\n personal access token does not seem to be supported.\n\n :type user_token: str\n :param user_token: The user's token in ~/.gitsomeconfig.\n\n :type verify_ssl: bool\n :param verify_ssl: Determines whether to verify SSL certs.\n \"\"\"\n CONFIG = '.gitsomeconfig'\n CONFIG_CLR_PRIMARY = 'clr_primary'\n CONFIG_CLR_SECONDARY = 'clr_secondary'\n CONFIG_CLR_TERTIARY = 'clr_tertiary'\n CONFIG_CLR_QUATERNARY = 'clr_quaternary'\n CONFIG_CLR_BOLD = 'clr_bold'\n CONFIG_CLR_CODE = 'clr_code'\n CONFIG_CLR_ERROR = 'clr_error'\n CONFIG_CLR_HEADER = 'clr_header'\n CONFIG_CLR_LINK = 'clr_link'\n CONFIG_CLR_LIST = 'clr_list'\n CONFIG_CLR_MESSAGE = 'clr_message'\n CONFIG_CLR_NUM_COMMENTS = 'clr_num_comments'\n CONFIG_CLR_NUM_POINTS = 'clr_num_points'\n CONFIG_CLR_TAG = 'clr_tag'\n CONFIG_CLR_TIME = 'clr_time'\n CONFIG_CLR_TITLE = 'clr_title'\n CONFIG_CLR_TOOLTIP = 'clr_tooltip'\n CONFIG_CLR_USER = 'clr_user'\n CONFIG_CLR_VIEW_LINK = 'clr_view_link'\n 
CONFIG_CLR_VIEW_INDEX = 'clr_view_index'\n CONFIG_SECTION = 'github'\n CONFIG_USER_LOGIN = 'user_login'\n CONFIG_USER_PASS = 'user_pass'\n CONFIG_USER_TOKEN = 'user_token'\n CONFIG_USER_FEED = 'user_feed'\n CONFIG_ENTERPRISE_URL = 'enterprise_url'\n CONFIG_VERIFY_SSL = 'verify_ssl'\n CONFIG_URL = '.gitsomeconfigurl'\n CONFIG_URL_SECTION = 'url'\n CONFIG_URL_LIST = 'url_list'\n CONFIG_AVATAR = '.gitsomeconfigavatar.png'\n\n def __init__(self):\n self.api = None\n self.user_login = None\n self.user_pass = None\n self.user_token = None\n self.user_feed = None\n self.enterprise_url = None\n self.verify_ssl = True\n self.urls = []\n self._init_colors()\n self.load_configs([self.load_config_colors])\n self.login = login\n self.authorize = authorize\n self.getpass = getpass\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n\n def load_config(self, parser, cfg_label, default=None, color_config=\n False, boolean_config=False):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n\n :type cfg_label: str\n :param cfg_label: The config label to load.\n\n :type default: str\n :param default: The default color if no color config exists.\n Default: None.\n\n :type color_config: bool\n :param color_config: Determines whether this is a color config.\n Default: False.\n\n :type boolean_config: bool\n :param boolean_config: Determines whether to load a boolean config.\n Default: False.\n \"\"\"\n try:\n if boolean_config:\n cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)\n else:\n cfg = parser.get(self.CONFIG_SECTION, cfg_label)\n if color_config:\n if cfg == 'none':\n cfg = None\n click.style('', fg=cfg)\n except (TypeError, configparser.NoOptionError):\n return default\n return cfg\n\n def load_configs(self, config_funcs):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type foo: list\n :param foo: The config methods to run.\n \"\"\"\n config_file_path = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n try:\n with open(config_file_path) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n for config_func in config_funcs:\n config_func(parser)\n except IOError:\n return None\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, 
cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , 
default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as 
config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n \"\"\"Gitsome config.\n\n :type api: :class:`github3.github.Github`\n :param api: An instance of github3.github.Github.\n\n :type clr_x: str\n :param clr_x: Various ansi color config colors to use for highlights.\n\n :type CONFIG: str\n :param CONFIG: The config file name.\n\n :type CONFIG_SECTION: str\n :param CONFIG_SECTION: The main config file section label.\n\n :type CONFIG_CLR_X: str\n :param CONFIG_CLR_X: Various ansi color config labels to use for highlights.\n\n :type CONFIG_ENTERPRISE_URL: str\n :param CONFIG_ENTERPRISE_URL: The GitHub Enterprise url.\n\n :type CONFIG_USER_LOGIN: str\n :param CONFIG_USER_LOGIN: The user login.\n\n :type CONFIG_USER_PASS: str\n :param CONFIG_USER_PASS: The user password.\n\n :type CONFIG_USER_TOKEN: str\n :param CONFIG_USER_TOKEN: The user token.\n\n :type CONFIG_USER_FEED: str\n :param CONFIG_USER_FEED: The user feed config. This is the feed on\n https://github.com/ when logged in and requires the basic auth model,\n which doesn't work when logging in with tokens or 2FA. This config\n listed the pre-signed url to access the feed.\n\n :type CONFIG_URL: str\n :param CONFIG_URL: The config file name that contains urls used in the\n `gh view` command.\n\n :type CONFIG_URL_SECTION: str\n :param CONFIG_URL_SECTION: The config file section that contains urls used\n in the `gh view [url_index]` command.\n\n :type CONFIG_URL_LIST: str\n :param CONFIG_URL_LIST: The config containing a list of the last set of\n urls the user has seen, which allows the user to quickly access a repo\n url with the `gh view [url_index]` command.\n\n :type CONFIG_VERIFY_SSL: str\n :param CONFIG_VERIFY_SSL: Determines whether to verify SSL certs.\n\n :type enterprise_url: str\n :param enterprise_url: The GitHub Enterprise url.\n\n :type urls: list\n :param urls: The last set of urls the user has seen, which allows the user\n to quickly access a repo url with the gh view [url_index] command.\n\n :type user_login: str\n :param user_login: The user's login in ~/.gitsomeconfig.\n\n :type user_pass: str\n :param user_pass: The user's pass in ~/.gitsomeconfig.\n This is only stored for GitHub Enterprise users since using only a\n personal access token does not seem to be supported.\n\n :type user_token: str\n :param user_token: The user's token in ~/.gitsomeconfig.\n\n :type verify_ssl: bool\n :param verify_ssl: Determines whether to verify SSL certs.\n \"\"\"\n CONFIG = '.gitsomeconfig'\n CONFIG_CLR_PRIMARY = 'clr_primary'\n CONFIG_CLR_SECONDARY = 'clr_secondary'\n CONFIG_CLR_TERTIARY = 'clr_tertiary'\n CONFIG_CLR_QUATERNARY = 'clr_quaternary'\n CONFIG_CLR_BOLD = 'clr_bold'\n CONFIG_CLR_CODE = 'clr_code'\n CONFIG_CLR_ERROR = 'clr_error'\n CONFIG_CLR_HEADER = 'clr_header'\n CONFIG_CLR_LINK = 'clr_link'\n CONFIG_CLR_LIST = 'clr_list'\n CONFIG_CLR_MESSAGE = 'clr_message'\n CONFIG_CLR_NUM_COMMENTS = 'clr_num_comments'\n CONFIG_CLR_NUM_POINTS = 'clr_num_points'\n CONFIG_CLR_TAG = 'clr_tag'\n CONFIG_CLR_TIME = 'clr_time'\n CONFIG_CLR_TITLE = 'clr_title'\n CONFIG_CLR_TOOLTIP = 'clr_tooltip'\n CONFIG_CLR_USER = 'clr_user'\n CONFIG_CLR_VIEW_LINK = 'clr_view_link'\n CONFIG_CLR_VIEW_INDEX = 'clr_view_index'\n CONFIG_SECTION = 'github'\n CONFIG_USER_LOGIN = 'user_login'\n CONFIG_USER_PASS = 'user_pass'\n CONFIG_USER_TOKEN = 'user_token'\n CONFIG_USER_FEED = 'user_feed'\n CONFIG_ENTERPRISE_URL = 'enterprise_url'\n CONFIG_VERIFY_SSL = 'verify_ssl'\n CONFIG_URL = '.gitsomeconfigurl'\n CONFIG_URL_SECTION = 'url'\n CONFIG_URL_LIST = 'url_list'\n 
CONFIG_AVATAR = '.gitsomeconfigavatar.png'\n\n def __init__(self):\n self.api = None\n self.user_login = None\n self.user_pass = None\n self.user_token = None\n self.user_feed = None\n self.enterprise_url = None\n self.verify_ssl = True\n self.urls = []\n self._init_colors()\n self.load_configs([self.load_config_colors])\n self.login = login\n self.authorize = authorize\n self.getpass = getpass\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n\n def load_config(self, parser, cfg_label, default=None, color_config=\n False, boolean_config=False):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n\n :type cfg_label: str\n :param cfg_label: The config label to load.\n\n :type default: str\n :param default: The default color if no color config exists.\n Default: None.\n\n :type color_config: bool\n :param color_config: Determines whether this is a color config.\n Default: False.\n\n :type boolean_config: bool\n :param boolean_config: Determines whether to load a boolean config.\n Default: False.\n \"\"\"\n try:\n if boolean_config:\n cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)\n else:\n cfg = parser.get(self.CONFIG_SECTION, cfg_label)\n if color_config:\n if cfg == 'none':\n cfg = None\n click.style('', fg=cfg)\n except (TypeError, configparser.NoOptionError):\n return default\n return cfg\n\n def load_configs(self, config_funcs):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type foo: list\n :param foo: The config methods to run.\n \"\"\"\n config_file_path = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n try:\n with open(config_file_path) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n for config_func in config_funcs:\n config_func(parser)\n except IOError:\n return None\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, 
cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , 
default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as 
config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n CONFIG = '.gitsomeconfig'\n CONFIG_CLR_PRIMARY = 'clr_primary'\n CONFIG_CLR_SECONDARY = 'clr_secondary'\n CONFIG_CLR_TERTIARY = 'clr_tertiary'\n CONFIG_CLR_QUATERNARY = 'clr_quaternary'\n CONFIG_CLR_BOLD = 'clr_bold'\n CONFIG_CLR_CODE = 'clr_code'\n CONFIG_CLR_ERROR = 'clr_error'\n CONFIG_CLR_HEADER = 'clr_header'\n CONFIG_CLR_LINK = 'clr_link'\n CONFIG_CLR_LIST = 'clr_list'\n CONFIG_CLR_MESSAGE = 'clr_message'\n CONFIG_CLR_NUM_COMMENTS = 'clr_num_comments'\n CONFIG_CLR_NUM_POINTS = 'clr_num_points'\n CONFIG_CLR_TAG = 'clr_tag'\n CONFIG_CLR_TIME = 'clr_time'\n CONFIG_CLR_TITLE = 'clr_title'\n CONFIG_CLR_TOOLTIP = 'clr_tooltip'\n CONFIG_CLR_USER = 'clr_user'\n CONFIG_CLR_VIEW_LINK = 'clr_view_link'\n CONFIG_CLR_VIEW_INDEX = 'clr_view_index'\n CONFIG_SECTION = 'github'\n CONFIG_USER_LOGIN = 'user_login'\n CONFIG_USER_PASS = 'user_pass'\n CONFIG_USER_TOKEN = 'user_token'\n CONFIG_USER_FEED = 'user_feed'\n CONFIG_ENTERPRISE_URL = 'enterprise_url'\n CONFIG_VERIFY_SSL = 'verify_ssl'\n CONFIG_URL = '.gitsomeconfigurl'\n CONFIG_URL_SECTION = 'url'\n CONFIG_URL_LIST = 'url_list'\n CONFIG_AVATAR = '.gitsomeconfigavatar.png'\n\n def __init__(self):\n self.api = None\n self.user_login = None\n self.user_pass = None\n self.user_token = None\n self.user_feed = None\n self.enterprise_url = None\n self.verify_ssl = True\n self.urls = []\n self._init_colors()\n self.load_configs([self.load_config_colors])\n self.login = login\n self.authorize = authorize\n self.getpass = getpass\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 
'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n\n def load_config(self, parser, cfg_label, default=None, color_config=\n False, boolean_config=False):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n\n :type cfg_label: str\n :param cfg_label: The config label to load.\n\n :type default: str\n :param default: The default color if no color config exists.\n Default: None.\n\n :type color_config: bool\n :param color_config: Determines whether this is a color config.\n Default: False.\n\n :type boolean_config: bool\n :param boolean_config: Determines whether to load a boolean config.\n Default: False.\n \"\"\"\n try:\n if boolean_config:\n cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)\n else:\n cfg = parser.get(self.CONFIG_SECTION, cfg_label)\n if color_config:\n if cfg == 'none':\n cfg = None\n click.style('', fg=cfg)\n except (TypeError, configparser.NoOptionError):\n return default\n return cfg\n\n def load_configs(self, config_funcs):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type foo: list\n :param foo: The config methods to run.\n \"\"\"\n config_file_path = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n try:\n with open(config_file_path) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n for config_func in config_funcs:\n config_func(parser)\n except IOError:\n return None\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, 
cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , 
default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as 
config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self):\n self.api = None\n self.user_login = None\n self.user_pass = None\n self.user_token = None\n self.user_feed = None\n self.enterprise_url = None\n self.verify_ssl = True\n self.urls = []\n self._init_colors()\n self.load_configs([self.load_config_colors])\n self.login = login\n self.authorize = authorize\n self.getpass = getpass\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into 
GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n\n def load_config(self, parser, cfg_label, default=None, color_config=\n False, boolean_config=False):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n\n :type cfg_label: str\n :param cfg_label: The config label to load.\n\n :type default: str\n :param default: The default color if no color config exists.\n Default: None.\n\n :type color_config: bool\n :param color_config: Determines whether this is a color config.\n Default: False.\n\n :type boolean_config: bool\n :param boolean_config: Determines whether to load a boolean config.\n Default: False.\n \"\"\"\n try:\n if boolean_config:\n cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)\n else:\n cfg = parser.get(self.CONFIG_SECTION, cfg_label)\n if color_config:\n if cfg == 'none':\n cfg = None\n click.style('', fg=cfg)\n except (TypeError, configparser.NoOptionError):\n return default\n return cfg\n\n def load_configs(self, config_funcs):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type foo: list\n :param foo: The config methods to run.\n \"\"\"\n config_file_path = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n try:\n with open(config_file_path) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n for config_func in config_funcs:\n config_func(parser)\n except IOError:\n return None\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, 
cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , 
default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as 
config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self):\n self.api = None\n self.user_login = None\n self.user_pass = None\n self.user_token = None\n self.user_feed = None\n self.enterprise_url = None\n self.verify_ssl = True\n self.urls = []\n self._init_colors()\n self.load_configs([self.load_config_colors])\n self.login = login\n self.authorize = authorize\n self.getpass = getpass\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into 
GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n\n def load_config(self, parser, cfg_label, default=None, color_config=\n False, boolean_config=False):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n\n :type cfg_label: str\n :param cfg_label: The config label to load.\n\n :type default: str\n :param default: The default color if no color config exists.\n Default: None.\n\n :type color_config: bool\n :param color_config: Determines whether this is a color config.\n Default: False.\n\n :type boolean_config: bool\n :param boolean_config: Determines whether to load a boolean config.\n Default: False.\n \"\"\"\n try:\n if boolean_config:\n cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)\n else:\n cfg = parser.get(self.CONFIG_SECTION, cfg_label)\n if color_config:\n if cfg == 'none':\n cfg = None\n click.style('', fg=cfg)\n except (TypeError, configparser.NoOptionError):\n return default\n return cfg\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication 
code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , 
fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n\n def load_config(self, parser, cfg_label, default=None, color_config=\n False, boolean_config=False):\n \"\"\"Load the specified config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n\n :type cfg_label: str\n :param cfg_label: The config label to load.\n\n :type default: str\n :param default: The default color if no color config exists.\n Default: None.\n\n :type color_config: bool\n :param color_config: Determines whether this is a color config.\n Default: False.\n\n :type boolean_config: bool\n :param boolean_config: Determines whether to load a boolean config.\n Default: False.\n \"\"\"\n try:\n if boolean_config:\n cfg = parser.getboolean(self.CONFIG_SECTION, cfg_label)\n else:\n cfg = parser.get(self.CONFIG_SECTION, cfg_label)\n if color_config:\n if cfg == 'none':\n cfg = None\n click.style('', fg=cfg)\n except (TypeError, configparser.NoOptionError):\n return default\n return cfg\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication 
code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , 
fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n\n def check_auth(self):\n \"\"\"Check if the current authorization is valid.\n\n This method uses the ratelimit_remaining api to check whether\n the currently authenticated user's credentials are valid without\n deducting from the rate limit. 
The ratelimit_remaining api does not\n seem to be available for GitHub Enterprise.\n\n github3.py's method check_authorization seems to only work given\n an authorization created by a registered application.\n\n TODO: Determine a better way to check the authorization for\n GitHub Enterprise.\n\n :type enterprise: bool\n :param enterprise: Determines whether we are authenticating with\n GitHub Enterprise.\n \"\"\"\n if self.enterprise_url is not None:\n return True\n try:\n if self.api is not None:\n self.api.ratelimit_remaining\n return True\n else:\n self.print_auth_error()\n except AuthenticationFailed:\n self.print_auth_error()\n return False\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n <function token>\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, 
cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, 
self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n <function token>\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n <function token>\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if 
click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n\n def request_two_factor_code(self):\n \"\"\"Request two factor authentication code.\n\n Callback if two factor authentication is requested.\n\n :rtype: str\n :return: The user input two factor authentication code.\n \"\"\"\n code = ''\n while not code:\n code = input('Enter 2FA code: ')\n return code\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n 
parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n\n def authenticate_cached_credentials(self, config, parser,\n enterprise_auth=enterprise_login):\n \"\"\"Authenticate with the user's credentials in ~/.gitsomeconfig.\n\n :type config: str\n :param config: The config path.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser.\n \"\"\"\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.user_login = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_LOGIN)\n self.user_pass = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_PASS)\n self.user_token = self.load_config(parser=parser, cfg_label=\n self.CONFIG_USER_TOKEN)\n self.enterprise_url = self.load_config(parser=parser, cfg_label\n =self.CONFIG_ENTERPRISE_URL)\n self.verify_ssl = self.load_config(parser=parser, cfg_label=\n self.CONFIG_VERIFY_SSL, boolean_config=True)\n self.user_feed = self.load_config(parser=parser, cfg_label=self\n .CONFIG_USER_FEED)\n if not self.verify_ssl:\n requests.packages.urllib3.disable_warnings(\n InsecureRequestWarning)\n login_kwargs = {'username': self.user_login,\n 'two_factor_callback': self.request_two_factor_code}\n if self.enterprise_url is not None:\n self.login = enterprise_auth\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n if self.user_token is not None:\n login_kwargs.update({'token': self.user_token})\n elif self.user_pass is not None:\n login_kwargs.update({'password': self.user_pass})\n else:\n self.print_auth_error()\n return\n else:\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n <function token>\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n <function token>\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if 
click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n <function token>\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info 
on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n <function token>\n\n def get_github_config_path(self, config_file_name):\n \"\"\"Attempt to find the github config file.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type config_file_name: str\n :param config_file_name: The config file name.\n\n :rtype: str\n :return: The github config file path.\n \"\"\"\n home = os.path.abspath(os.environ.get('HOME', ''))\n config_file_path = os.path.join(home, config_file_name)\n return config_file_path\n <function token>\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if 
click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = input('URL: ')\n <function token>\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info 
on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, 
cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = 
input('URL: ')\n <function token>\n\n def save_config(self):\n \"\"\"Saves the config to ~/.gitsomeconfig.\"\"\"\n if self.check_auth():\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n parser.add_section(self.CONFIG_SECTION)\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_LOGIN, self.\n user_login)\n if self.user_token is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_TOKEN,\n self.user_token)\n if self.user_feed is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n if self.enterprise_url is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_ENTERPRISE_URL,\n self.enterprise_url)\n if self.user_pass is not None:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_PASS,\n self.user_pass)\n else:\n parser.remove_option(self.CONFIG_SECTION, self.CONFIG_USER_PASS\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_VERIFY_SSL, self.\n verify_ssl)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_PRIMARY, self.\n clr_primary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_SECONDARY, self\n .clr_secondary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TERTIARY, self.\n clr_tertiary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_QUATERNARY,\n self.clr_quaternary)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_BOLD, self.clr_bold\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_CODE, self.clr_code\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_ERROR, self.\n clr_error)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_HEADER, self.\n clr_header)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LINK, self.clr_link\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_LIST, self.clr_list\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_MESSAGE, self.\n clr_message)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_COMMENTS,\n self.clr_num_comments)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_NUM_POINTS,\n self.clr_num_points)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TAG, self.clr_tag)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TIME, self.clr_time\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TITLE, self.\n clr_title)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_TOOLTIP, self.\n clr_tooltip)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_USER, self.clr_user\n )\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_LINK, self\n .clr_view_link)\n parser.set(self.CONFIG_SECTION, self.CONFIG_CLR_VIEW_INDEX,\n self.clr_view_index)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, 
cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n\n def prompt_news_feed(self):\n \"\"\"Prompt the user to enter a news feed url.\"\"\"\n if click.confirm(\n \"\"\"No feed url detected.\n Calling gh events without an argument\n displays the logged in user's news feed.\nDo you want gitsome to track your news feed?\"\"\"\n , default=True):\n click.secho(\n \"\"\"Visit the following url while logged into GitHub:\n https://github.com\nEnter the url found under \"Subscribe to your news feed\".\"\"\"\n , fg=self.clr_message)\n self.user_feed = ''\n while not self.user_feed:\n self.user_feed = 
input('URL: ')\n <function token>\n <function token>\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_config_colors(self, parser):\n \"\"\"Load the color config from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.load_colors(parser)\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, 
cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n <function token>\n <function token>\n <function token>\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n 
\"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n\n def authenticate(self, enterprise=False, enterprise_auth=\n enterprise_login, overwrite=False):\n \"\"\"Log into GitHub.\n\n Adapted from https://github.com/sigmavirus24/github-cli.\n\n :type enterprise: bool\n :param enterprise: Determines whether to configure GitHub Enterprise.\n Default: False.\n\n :type overwrite: bool\n :param overwrite: indicates whether we cant to overwrite the current\n set of credentials. 
Default: False.\n \"\"\"\n if self.api is not None and not overwrite:\n return\n config = self.get_github_config_path(self.CONFIG)\n parser = configparser.RawConfigParser()\n if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK\n ) and not overwrite:\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n self.authenticate_cached_credentials(config, parser)\n else:\n login_kwargs = {'two_factor_callback': self.request_two_factor_code\n }\n if enterprise:\n self.login = enterprise_auth\n while not self.enterprise_url:\n self.enterprise_url = input('Enterprise URL: ')\n if click.confirm('Do you want to verify SSL certs?',\n default=True):\n self.verify_ssl = True\n else:\n self.verify_ssl = False\n login_kwargs.update({'url': self.enterprise_url, 'verify':\n self.verify_ssl})\n while not self.user_login:\n self.user_login = input('User Login: ')\n login_kwargs.update({'username': self.user_login})\n if click.confirm(\n 'Do you want to log in with a password [Y] or a personal access token [n]?'\n , default=True):\n user_pass = None\n while not user_pass:\n user_pass = self.getpass('Password: ')\n login_kwargs.update({'password': user_pass})\n try:\n if not enterprise:\n auth = self.authorize(self.user_login, user_pass,\n scopes=['user', 'repo'], note='gitsome',\n note_url=\n 'https://github.com/donnemartin/gitsome',\n two_factor_callback=self.request_two_factor_code)\n self.user_token = auth.token\n else:\n self.user_pass = user_pass\n except (UnprocessableEntity, AuthenticationFailed):\n click.secho('Error creating token.', fg=self.clr_error)\n click.secho(\n \"\"\"Visit the following page and verify you do not have an existing token named \"gitsome\":\n https://github.com/settings/tokens\nIf a token already exists, update your ~/.gitsomeconfig file with your token:\n user_token = TOKEN\nYou can also generate a new token.\"\"\"\n , fg=self.clr_message)\n self.print_auth_error()\n return\n else:\n while not self.user_token:\n self.user_token = input('Token: ')\n login_kwargs.update({'token': self.user_token})\n self.api = self.login(**login_kwargs)\n if self.user_feed:\n parser.set(self.CONFIG_SECTION, self.CONFIG_USER_FEED, self\n .user_feed)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n <function token>\n <function token>\n <function token>\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n 
https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n <function token>\n <function token>\n <function token>\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n\n def show_bash_completions_info(self):\n \"\"\"Show info on how to enable bash completions\"\"\"\n click.secho(\n \"\"\"By default, gitsome looks at the following locations to enable bash completions:\n https://github.com/donnemartin/gitsome/blob/master/xonsh/environ.py#L123-L130\nIf bash completions are not working for you, check out the following link:\n https://github.com/donnemartin/gitsome#enabling-bash-completions\"\"\"\n , fg=self.clr_message)\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_colors(self, parser):\n \"\"\"Load all colors from ~/.gitsomeconfig.\n\n :type parser: :class:`ConfigParser.RawConfigParser`\n :param parser: An instance of `ConfigParser.RawConfigParser`.\n \"\"\"\n self.clr_primary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_PRIMARY, default=self.clr_primary, color_config=True)\n self.clr_secondary = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_SECONDARY, default=self.clr_secondary, color_config\n =True)\n self.clr_tertiary = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TERTIARY, default=self.clr_tertiary, color_config=True)\n self.clr_quaternary = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_QUATERNARY, default=self.clr_quaternary,\n color_config=True)\n self.clr_bold = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_BOLD, default=self.clr_bold, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_CODE, default=self.clr_code, color_config=True)\n self.clr_code = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_ERROR, default=self.clr_code, color_config=True)\n self.clr_header = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_HEADER, default=self.clr_header, color_config=True)\n self.clr_link = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LINK, default=self.clr_link, color_config=True)\n self.clr_list = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_LIST, default=self.clr_list, color_config=True)\n self.clr_message = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_MESSAGE, default=self.clr_message, color_config=True)\n self.clr_num_comments = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_COMMENTS, default=self.clr_num_comments,\n color_config=True)\n self.clr_num_points = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_NUM_POINTS, default=self.clr_num_points,\n color_config=True)\n self.clr_tag = 
self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TAG, default=self.clr_tag, color_config=True)\n self.clr_time = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TIME, default=self.clr_time, color_config=True)\n self.clr_title = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TITLE, default=self.clr_title, color_config=True)\n self.clr_tooltip = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_TOOLTIP, default=self.clr_tooltip, color_config=True)\n self.clr_user = self.load_config(parser=parser, cfg_label=self.\n CONFIG_CLR_USER, default=self.clr_user, color_config=True)\n self.clr_view_link = self.load_config(parser=parser, cfg_label=self\n .CONFIG_CLR_VIEW_LINK, default=self.clr_view_link, color_config\n =True)\n self.clr_view_index = self.load_config(parser=parser, cfg_label=\n self.CONFIG_CLR_VIEW_INDEX, default=self.clr_view_index,\n color_config=True)\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n <function token>\n <function token>\n <function token>\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n <function token>\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n <function token>\n <function token>\n <function token>\n\n def save_urls(self):\n \"\"\"Save the current set of urls to ~/.gitsomeconfigurl.\"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n try:\n parser.add_section(self.CONFIG_URL_SECTION)\n except configparser.DuplicateSectionError:\n pass\n parser.set(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST, self.urls)\n with open(config, 'w+') as config_file:\n parser.write(config_file)\n <function token>\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n\n def print_auth_error(self):\n \"\"\"Print a message the authorization has failed.\"\"\"\n click.secho('Authentication error.', fg=self.clr_error)\n click.secho(\n 'Update your credentials in ~/.gitsomeconfig or run:\\n gh configure'\n , fg=self.clr_message)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def load_urls(self, view_in_browser):\n \"\"\"Load the current set of urls from ~/.gitsomeconfigurl.\n\n :type view_in_browser: bool\n :param view_in_browser: Determines whether to view the urls in a\n browser.\n\n :rtype: list\n :return: Collection of urls.\n \"\"\"\n config = self.get_github_config_path(self.CONFIG_URL)\n parser = configparser.RawConfigParser()\n with open(config) as config_file:\n try:\n parser.read_file(config_file)\n except AttributeError:\n parser.readfp(config_file)\n urls = parser.get(self.CONFIG_URL_SECTION, self.CONFIG_URL_LIST)\n urls = urls.strip()\n excludes = ['[', ']', \"'\"]\n for exclude in excludes:\n urls = urls.replace(exclude, '')\n if not view_in_browser:\n urls = urls.replace('https://github.com/', '')\n return urls.split(', ')\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def _init_colors(self):\n \"\"\"Initialize colors to their defaults.\"\"\"\n self.clr_primary = None\n self.clr_secondary = 'green'\n self.clr_tertiary = 'cyan'\n self.clr_quaternary = 'yellow'\n self.clr_bold = 'cyan'\n self.clr_code = 'cyan'\n self.clr_error = 'red'\n self.clr_header = 'yellow'\n self.clr_link = 'green'\n self.clr_list = 'cyan'\n self.clr_message = None\n self.clr_num_comments = 'green'\n self.clr_num_points = 'green'\n self.clr_tag = 'cyan'\n self.clr_time = 'yellow'\n self.clr_title = None\n self.clr_tooltip = None\n self.clr_user = 'cyan'\n self.clr_view_link = 'magenta'\n self.clr_view_index = 'magenta'\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Config(object):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
859 |
5488b32970a0b734334835457c712768a756de7f
|
from datetime import datetime
import requests as req
import smtplib
import mysql.connector
#mysql constant
MYSQL_HOST='den1.mysql6.gear.host'
MYSQL_USER='winlabiot'
MYSQL_PW='winlabiot+123'
MYSQL_DB="winlabiot"
Coffee_mailing_list_table='coffee_mailing_list'
#keys in the dict received via socket
TIME='time'
AMBIENT_TEMP='ambient_temp'
OBJECT_TEMP='object_temp'
#preset values for derivative
TIME_INTERVAL=300
CURR_TIME_INTERVAL=0
MAX_DATA_POINTS=100
#openhab port and host
IP_ADDR='localhost'
PORT=8080
CURR_DERIVATIVE_URL='http://{ip}:{port}/rest/items/DataAnalyzer_CurrentDerivative'.format(port=PORT, ip=IP_ADDR)
DERIVATIVE_THRESHOLD_URL='http://{ip}:{port}/rest/items/DataAnalyzer_DerivativeThreshold'.format(port=PORT, ip=IP_ADDR)
CURR_TIME_INTERVAL_URL='http://{ip}:{port}/rest/items/DataAnalyzer_CurrentTimeInterval'.format(port=PORT, ip=IP_ADDR)
#state used to decide whether a reading is just noise,
#and to avoid repeatedly sending email while the derivative stays above the threshold
Making_Coffee=False
Not_Making_Coffee_Count=0
#gmail access
USER='[email protected]'
PASSWORD='winlabiot123'
#email info
FROM ='[email protected]'
TO=[]
CONTENT='Coffee will be served soon!'
def update_To_email_addr():
#global cursor
global TO
#connect to GearHost mysql database
GearHostMySQL = mysql.connector.connect(
host=MYSQL_HOST,
user=MYSQL_USER,
passwd=MYSQL_PW,
database=MYSQL_DB
)
cursor = GearHostMySQL.cursor()
cursor.execute("SELECT email FROM coffee_mailing_list;")
TO=cursor.fetchall()
cursor.close()
GearHostMySQL.close()
def send_email(user, password, from_addr, to_addr, content):
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
response=server.login(user,password)
    print(str(datetime.now())+' Server Response: '+str(response))
for address in to_addr:
server.sendmail(from_addr,address,content)
        print(str(datetime.now())+' Email Sent to '+str(address))
class analyzer:
#data type is AMBIENT_TEMP or OBJECT_TEMP
#derivative_threshold is degree/sec
def __init__(self,data_type,derivative_threshold, max_data_points=100,time_interval=300):
#data is array of dict data points
self.data=[]
#start index is the earliest data point
self.start_index=0
self.derivative=0
self.time_interval=time_interval
self.curr_time_interval=0
self.key=data_type
self.max_data_points=max_data_points
self.derivative_threshold=derivative_threshold
def process(self,newPoint, url,update_monitor=True, email=True):
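        # process one reading: store it, refresh the derivative, optionally push the
        # current values to the openHAB REST items, and email the mailing list once
        # when a new brew is detected (derivative above threshold after a quiet period)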
global Making_Coffee
global Not_Making_Coffee_Count
self.add_data_point(newPoint)
self.update_derivative()
#update monitor
if update_monitor:
            response=req.post(CURR_DERIVATIVE_URL, data=str(self.derivative))
            response=req.post(DERIVATIVE_THRESHOLD_URL, data=str(self.derivative_threshold))
            response=req.post(CURR_TIME_INTERVAL_URL, data=str(self.curr_time_interval))
#rv is whether making coffee regardless of noise
rv=False
if(self.derivative>self.derivative_threshold):
if update_monitor:
                response=req.post(url, data='Making Coffee')
if(Making_Coffee==False and Not_Making_Coffee_Count>10 and email):
#update target email info
update_To_email_addr()
send_email(USER,PASSWORD,FROM,TO,CONTENT)
rv=True
#update constant
Making_Coffee=True
Not_Making_Coffee_Count=0
else:
if update_monitor:
                response=req.post(url, data='Not Ready')
#update constant
Making_Coffee=False
Not_Making_Coffee_Count+=1
rv= False
return rv
#data --> dict
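    # newPoint is a dict holding a TIME string plus the temperature readings;
    # the timestamp is parsed into a datetime before the point is stored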
def add_data_point(self,newPoint):
newPoint[TIME]=self.str2datetime(newPoint[TIME])
self.data.append(newPoint)
self.curr_time_interval=(self.data[len(self.data)-1][TIME]-self.data[self.start_index][TIME]).total_seconds()
        #clear expired data if max data points is reached
if(len(self.data)>self.max_data_points):
del self.data[0:self.start_index]
self.start_index=0
'''
if (len(self.data)==5):
#replace expired data point
self.data[self.start_index]=newPoint
#update start index
if self.start_index==4:
self.start_index=0
else:
self.start_index+=1
else:
self.data.append(newPoint)
'''
def str2datetime(self, datetime_string):
return datetime.strptime(datetime_string, '%Y-%m-%d %H:%M:%S.%f')
def update_derivative(self):
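        # derivative over the retained window: (newest value - oldest value) / elapsed
        # seconds, computed only once the window spans time_interval; the window start
        # then advances by one point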
if self.curr_time_interval<self.time_interval:
return
else:
self.derivative=1.0*(self.data[len(self.data)-1][self.key]-self.data[self.start_index][self.key])/self.curr_time_interval
#update start_index
self.start_index+=1
#update curr_time_interval
self.curr_time_interval=(self.data[len(self.data)-1][TIME]-self.data[self.start_index][TIME]).total_seconds()
|
[
"from datetime import datetime\nimport requests as req\n\nimport smtplib\n\nimport mysql.connector\n\n#mysql constant\nMYSQL_HOST='den1.mysql6.gear.host'\nMYSQL_USER='winlabiot'\nMYSQL_PW='winlabiot+123'\nMYSQL_DB=\"winlabiot\"\nCoffee_mailing_list_table='coffee_mailing_list'\n\n\n#keys in dict receive via socket\nTIME='time'\nAMBIENT_TEMP='ambient_temp'\nOBJECT_TEMP='object_temp'\n\n#preset values for derivative\nTIME_INTERVAL=300\nCURR_TIME_INTERVAL=0\nMAX_DATA_POINTS=100\n\n#openhab port and host\nIP_ADDR='localhost'\nPORT=8080\n\nCURR_DERIVATIVE_URL='http://{ip}:{port}/rest/items/DataAnalyzer_CurrentDerivative'.format(port=PORT, ip=IP_ADDR)\nDERIVATIVE_THRESHOLD_URL='http://{ip}:{port}/rest/items/DataAnalyzer_DerivativeThreshold'.format(port=PORT, ip=IP_ADDR)\nCURR_TIME_INTERVAL_URL='http://{ip}:{port}/rest/items/DataAnalyzer_CurrentTimeInterval'.format(port=PORT, ip=IP_ADDR)\n\n#constant to decide whether it is noise or not\n#avoid keep sending email when derivative always > threshold\nMaking_Coffee=False\nNot_Making_Coffee_Count=0\n\n#gmail access\nUSER='[email protected]'\nPASSWORD='winlabiot123'\n\n#email info\nFROM ='[email protected]'\nTO=[]\n\nCONTENT='Coffee will be served soon!'\n\ndef update_To_email_addr():\n\t#global cursor\n\tglobal TO\n\n\t#connect to GearHost mysql database\n\tGearHostMySQL = mysql.connector.connect(\n\t host=MYSQL_HOST,\n\t user=MYSQL_USER,\n\t passwd=MYSQL_PW,\n\t database=MYSQL_DB\n\t)\n\tcursor = GearHostMySQL.cursor()\n\n\tcursor.execute(\"SELECT email FROM coffee_mailing_list;\")\n\tTO=cursor.fetchall()\n\tcursor.close()\n\tGearHostMySQL.close()\n\n\n\n\n\ndef send_email(user, password, from_addr, to_addr, content):\n\tserver = smtplib.SMTP('smtp.gmail.com', 587)\n\tserver.ehlo()\n\tserver.starttls()\n\n\tresponse=server.login(user,password)\n\tprint str(datetime.now())+'\t\t\tServer Response: '+str(response)\n\tfor address in to_addr:\n\t\tserver.sendmail(from_addr,address,content)\n\t\tprint str(datetime.now())+'\t\t\tEmail Sent to '+str(address)\n\n\n\nclass analyzer:\n\n\t#data type is AMBIENT_TEMP or OBJECT_TEMP\n\t#derivative_threshold is degree/sec\n\tdef __init__(self,data_type,derivative_threshold, max_data_points=100,time_interval=300):\n\t\t#data is array of dict data points\n\t\tself.data=[]\n\t\t#start index is the earliest data point\n\t\tself.start_index=0\n\t\tself.derivative=0\n\t\tself.time_interval=time_interval\n\t\tself.curr_time_interval=0\n\t\tself.key=data_type\n\t\tself.max_data_points=max_data_points\n\t\tself.derivative_threshold=derivative_threshold\n\n\tdef process(self,newPoint, url,update_monitor=True, email=True):\n\n\t\tglobal Making_Coffee\n\t\tglobal Not_Making_Coffee_Count\n\n\t\tself.add_data_point(newPoint)\n\t\tself.update_derivative()\n\n\t\t#update monitor \n\t\tif update_monitor:\n\t\t\treponse=req.post(CURR_DERIVATIVE_URL, data=str(self.derivative))\n\t\t\treponse=req.post(DERIVATIVE_THRESHOLD_URL, data=str(self.derivative_threshold))\n\t\t\treponse=req.post(CURR_TIME_INTERVAL_URL, data=str(self.curr_time_interval))\n\n\t\t#rv is whether making coffee regardless of noise\n\t\trv=False\n\n\n\t\tif(self.derivative>self.derivative_threshold):\n\t\t\tif update_monitor:\n\t\t\t\treponse=req.post(url, data='Making Coffee')\n\t\t\tif(Making_Coffee==False and Not_Making_Coffee_Count>10 and email):\n\t\t\t\t#update target email info\n\t\t\t\tupdate_To_email_addr()\n\t\t\t\tsend_email(USER,PASSWORD,FROM,TO,CONTENT)\n\t\t\t\trv=True\n\t\t\t#update constant\n\t\t\tMaking_Coffee=True 
\n\t\t\tNot_Making_Coffee_Count=0\n\n\t\telse:\n\t\t\tif update_monitor:\n\t\t\t\treponse=req.post(url, data='Not Ready')\t\n\n\t\t\t#update constant\n\t\t\tMaking_Coffee=False\n\t\t\tNot_Making_Coffee_Count+=1\n\n\t\t\trv= False\n\n\t\treturn rv\n\n\n\n\n\t#data --> dict\n\tdef add_data_point(self,newPoint):\n\n\t\tnewPoint[TIME]=self.str2datetime(newPoint[TIME])\n\t\tself.data.append(newPoint)\n\t\tself.curr_time_interval=(self.data[len(self.data)-1][TIME]-self.data[self.start_index][TIME]).total_seconds()\n\n\t\t#clear expired date if max data points is reached\n\t\tif(len(self.data)>self.max_data_points):\n\t\t\tdel self.data[0:self.start_index]\n\t\t\tself.start_index=0\n\n\n\n\t\t'''\t\t\n\t\tif (len(self.data)==5):\n\t\t\t#replace expired data point\n\t\t\tself.data[self.start_index]=newPoint\n\t\t\t#update start index\n\t\t\tif self.start_index==4:\n\t\t\t\tself.start_index=0\n\t\t\telse:\n\t\t\t\tself.start_index+=1\n\t\telse:\n\t\t\tself.data.append(newPoint)\n\t\t'''\n\n\tdef str2datetime(self, datetime_string):\n\t\treturn datetime.strptime(datetime_string, '%Y-%m-%d %H:%M:%S.%f')\n\n\n\tdef update_derivative(self):\n\t\tif self.curr_time_interval<self.time_interval:\n\t\t\treturn\n\t\telse:\n\t\t\tself.derivative=1.0*(self.data[len(self.data)-1][self.key]-self.data[self.start_index][self.key])/self.curr_time_interval\n\t\t\t#update start_index\n\t\t\tself.start_index+=1\n\n\t\t\t#update curr_time_interval\n\t\t\tself.curr_time_interval=(self.data[len(self.data)-1][TIME]-self.data[self.start_index][TIME]).total_seconds()\n\n\n\n\n\n\n\n\n\n"
] | true |
860 |
b039ed74e62f3a74e8506d4e14a3422499046c06
|
"""
Module for generic standard analysis plots.
"""
import numpy as np
import matplotlib.pyplot as plt
import cartopy as cart
import xarray as xr
import ecco_v4_py as ecco
def global_and_stereo_map(lat, lon, fld,
plot_type='pcolormesh',
cmap='YlOrRd',
title=None,
cmin=None,
cmax=None,
dpi=100,
show_colorbar=True):
"""Generate the Robinson and Arctic/Antarctic plot.
Parameters
----------
lat : xarray.DataArray
lon : xarray.DataArray
fld : xarray.DataArray
plot_type : string, optional
plot type to use, 'pcolormesh', or 'contourf'
cmap : string or colormap object (TBD)
cmin : double, optional
minimum value for colorbar
cmax : double, optional
maximum value for colorbar
    dpi : int, optional
plot resolution in dots (pixels) per inch
    title : string, optional
        figure title placed above all panels

    show_colorbar : logical, optional
        add a single colorbar shared by all panels

    Output
    ------
    fig : matplotlib figure handle
    axes : tuple of the three subplot axes
    """
# to do
# -figsize option?
# -cmin/cmax defaults handling with plot_proj ...
# -colorbar defaults with diverging/sequential
# -number of colors in plot
# -suppress dask warnings
# -get the subplot size "just right" no matter the figsize
# -arrows for when colorbar is exceeded
# handle colorbar limits
cmin, cmax, extend_cbar = set_colorbar_limits(fld,cmin,cmax)
# default figsize which seems to work for a laptop screen
plt.figure(figsize=(12,6),dpi=dpi)
# the big top global plot
fig, ax1, p1, cb1 = ecco.plot_proj_to_latlon_grid(
lat,lon,fld,
cmap=cmap,
plot_type=plot_type,
subplot_grid=[2,1,1],
projection_type='robin',
show_colorbar=False,
cmin=cmin,
cmax=cmax,
user_lon_0=0
)
# Arctic: bottom left
fig, ax2, p2, cb2 = ecco.plot_proj_to_latlon_grid(
lat,lon,fld,
cmap=cmap,
plot_type=plot_type,
subplot_grid=[2,2,3],
projection_type='stereo',
show_colorbar=False,
cmin=cmin,
cmax=cmax,
lat_lim=50,
user_lon_0=0
)
# ACC: bottom right
fig, ax3, p3, cb3 = ecco.plot_proj_to_latlon_grid(
lat,lon,fld,
cmap=cmap,
plot_type=plot_type,
subplot_grid=[2,2,4],
projection_type='stereo',
show_colorbar=False,
cmin=cmin,
cmax=cmax,
lat_lim=-40,
user_lon_0=180
)
# Set land color to gray
ax1.add_feature(cart.feature.LAND,facecolor='0.7',zorder=2)
ax2.add_feature(cart.feature.LAND,facecolor='0.7',zorder=2)
ax3.add_feature(cart.feature.LAND,facecolor='0.7',zorder=2)
# Make a single title
if title is not None:
fig.suptitle(title,verticalalignment='top',fontsize=24)
    # Make an overlying colorbar
if show_colorbar:
fig.subplots_adjust(right=0.9)
cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])
fig.colorbar(p3, cax=cbar_ax, extend=extend_cbar)
return fig, (ax1,ax2,ax3)
def plot_depth_slice(x, depth, fld,
stretch_depth=-500,
plot_type='pcolormesh',
cmap='YlOrRd',
title=None,
cmin=None,
cmax=None,
dpi=100,
show_colorbar=True):
"""2D plot of depth vs some other variable, stretching first 500m of depth.
Parameters
----------
depth : xarray DataArray or numpy array
depth variable
x : xarray DataArray or numpy array
variable for x-axis. Likely to be time, latitude, or longitude
fld : xarray DataArray or numpy array
2D field with depth + 1 dim
stretch_depth : scalar (int or float), optional
stretch top depth to this limit
"""
# Ensure negative values
#if (depth>0).any():
# depth = -depth
#if stretch_depth > 0:
# stretch_depth = -stretch_depth
# Handle shape
if len(x) == fld.shape[0]:
fld = fld.transpose()
# handle colorbar limits
cmin, cmax, extend_cbar = set_colorbar_limits(fld,cmin,cmax)
# default figsize which seems to work for a laptop screen
fig = plt.figure(figsize=(12,6),dpi=dpi)
# Could also use plt.subplots here ...
# First top 500m
ax1 = plt.subplot(2,1,1)
if plot_type == 'pcolormesh':
p1 = ax1.pcolormesh(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)
elif plot_type == 'contourf':
p1 = ax1.contourf(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)
# Handle y-axis
plt.ylim([stretch_depth, 0])
ax1.yaxis.axes.set_yticks(np.arange(stretch_depth,1,100))
plt.ylabel('Depth [%s]' % depth.attrs['units'])
# Remove top plot xtick label
ax1.xaxis.axes.set_xticklabels([])
# Now the rest ...
ax2 = plt.subplot(2,1,2)
if plot_type == 'pcolormesh':
p2 = ax2.pcolormesh(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)
elif plot_type == 'contourf':
p2 = ax2.contourf(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)
# Handle y-axis
plt.ylim([depth.min(), stretch_depth])
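    # tick marks every 1000 depth units, from twice the stretch depth down toward the deepest level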
yticks = np.flip(np.arange(2*stretch_depth,depth.min(),-1000))
ax2.yaxis.axes.set_yticks(yticks)
plt.ylabel('Depth [%s]' % depth.attrs['units'])
# Reduce space between subplots
fig.subplots_adjust(hspace=0.05)
# Make a single title
if title is not None:
fig.suptitle(title,verticalalignment='top',fontsize=24)
    # Make an overlying colorbar
if show_colorbar:
fig.subplots_adjust(right=0.83)
cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])
fig.colorbar(p2, cax=cbar_ax, extend=extend_cbar)
plt.show()
return fig,ax1,ax2
def set_colorbar_limits(fld,cmin,cmax):
"""If unset, compute colorbar limits based on field max/min values, sequential/divergent
Determine if colorbar needs to be extended
Parameters
----------
fld : xarray.DataArray
2D field to be plotted
Output
------
cmin : double
colorbar min value
cmax : double
colorbar max value
extend_cbar : string
flag to colorbar extension
"""
# handle input
if (cmin is None) and (cmax is not None):
raise RuntimeError('Only cmax given, must provide both cmin and cmax')
elif (cmin is not None) and (cmax is None):
raise RuntimeError('Only cmin given, must provide both cmin and cmax')
else:
# handle colorbar limits accidentally passed as with xarray functions
if type(cmin) is xr.DataArray:
cmin = cmin.values()
elif cmin is not None:
raise TypeError('Unsure of cmin type: ',type(cmin))
if type(cmax) is xr.DataArray:
cmax = cmax.values()
elif cmax is not None:
raise TypeError('Unsure of cmax type: ',type(cmax))
# compute fld limits
fld_min = fld.min(skipna=True).values
fld_max = fld.max(skipna=True).values
# if cmin/cmax not set, compute
if (cmin is None) and (cmax is None):
cmin = fld_min
cmax = fld_max
# determine if divergent colorbar
# Note: Not making divergent colorbar for temperature
# in degC because still sequential even though +/-
        if (fld_max*fld_min < 0) and (fld.name != 'THETA'):
cmax = np.nanmax(np.abs(fld.values))
cmin = -cmax
# determine if colorbar needs to be extended
if (cmin > fld_min) and (cmax < fld_max):
extend_cbar = "both"
elif cmin > fld_min:
extend_cbar = "min"
elif cmax < fld_max:
extend_cbar = "max"
else:
extend_cbar = "neither"
return cmin, cmax, extend_cbar
|
[
"\"\"\"\nModule for generic standard analysis plots.\n\"\"\"\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport cartopy as cart\nimport xarray as xr\nimport ecco_v4_py as ecco\n\n\ndef global_and_stereo_map(lat, lon, fld,\n plot_type='pcolormesh',\n cmap='YlOrRd',\n title=None,\n cmin=None,\n cmax=None,\n dpi=100,\n show_colorbar=True):\n\n \"\"\"Generate the Robinson and Arctic/Antarctic plot.\n\n Parameters\n ----------\n lat : xarray.DataArray \n\n lon : xarray.DataArray\n\n fld : xarray.DataArray\n\n plot_type : string, optional\n plot type to use, 'pcolormesh', or 'contourf'\n\n cmap : string or colormap object (TBD)\n\n cmin : double, optional\n minimum value for colorbar\n\n cmax : double, optional\n maximum value for colorbar\n\n dpi : int, optiopnal\n plot resolution in dots (pixels) per inch\n\n title,show_colorbar\n \n figsize?\n\n Output\n ------\n\n \"\"\"\n\n # to do\n # -figsize option?\n # -cmin/cmax defaults handling with plot_proj ... \n # -colorbar defaults with diverging/sequential\n # -number of colors in plot\n # -suppress dask warnings\n # -get the subplot size \"just right\" no matter the figsize\n # -arrows for when colorbar is exceeded\n\n # handle colorbar limits\n cmin, cmax, extend_cbar = set_colorbar_limits(fld,cmin,cmax)\n\n # default figsize which seems to work for a laptop screen\n plt.figure(figsize=(12,6),dpi=dpi)\n\n # the big top global plot\n fig, ax1, p1, cb1 = ecco.plot_proj_to_latlon_grid(\n lat,lon,fld,\n cmap=cmap,\n plot_type=plot_type,\n subplot_grid=[2,1,1],\n projection_type='robin',\n show_colorbar=False,\n cmin=cmin,\n cmax=cmax,\n user_lon_0=0\n )\n\n # Arctic: bottom left\n fig, ax2, p2, cb2 = ecco.plot_proj_to_latlon_grid(\n lat,lon,fld,\n cmap=cmap,\n plot_type=plot_type,\n subplot_grid=[2,2,3],\n projection_type='stereo',\n show_colorbar=False,\n cmin=cmin,\n cmax=cmax,\n lat_lim=50,\n user_lon_0=0\n )\n\n\n # ACC: bottom right\n fig, ax3, p3, cb3 = ecco.plot_proj_to_latlon_grid(\n lat,lon,fld,\n cmap=cmap,\n plot_type=plot_type,\n subplot_grid=[2,2,4],\n projection_type='stereo',\n show_colorbar=False,\n cmin=cmin,\n cmax=cmax,\n lat_lim=-40,\n user_lon_0=180\n )\n\n # Set land color to gray\n ax1.add_feature(cart.feature.LAND,facecolor='0.7',zorder=2)\n ax2.add_feature(cart.feature.LAND,facecolor='0.7',zorder=2)\n ax3.add_feature(cart.feature.LAND,facecolor='0.7',zorder=2)\n\n # Make a single title\n if title is not None:\n fig.suptitle(title,verticalalignment='top',fontsize=24)\n\n # Make an overyling colorbar\n if show_colorbar:\n fig.subplots_adjust(right=0.9)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p3, cax=cbar_ax, extend=extend_cbar)\n\n\n\n return fig, (ax1,ax2,ax3)\n\ndef plot_depth_slice(x, depth, fld, \n stretch_depth=-500,\n plot_type='pcolormesh',\n cmap='YlOrRd',\n title=None,\n cmin=None,\n cmax=None,\n dpi=100,\n show_colorbar=True):\n \"\"\"2D plot of depth vs some other variable, stretching first 500m of depth.\n\n Parameters\n ----------\n depth : xarray DataArray or numpy array\n depth variable\n x : xarray DataArray or numpy array\n variable for x-axis. 
Likely to be time, latitude, or longitude\n fld : xarray DataArray or numpy array\n 2D field with depth + 1 dim\n stretch_depth : scalar (int or float), optional\n stretch top depth to this limit\n \"\"\"\n\n # Ensure negative values \n #if (depth>0).any():\n # depth = -depth\n\n #if stretch_depth > 0:\n # stretch_depth = -stretch_depth\n\n # Handle shape\n if len(x) == fld.shape[0]:\n fld = fld.transpose()\n\n # handle colorbar limits\n cmin, cmax, extend_cbar = set_colorbar_limits(fld,cmin,cmax)\n\n # default figsize which seems to work for a laptop screen\n fig = plt.figure(figsize=(12,6),dpi=dpi)\n\n # Could also use plt.subplots here ...\n\n # First top 500m\n ax1 = plt.subplot(2,1,1)\n if plot_type == 'pcolormesh':\n p1 = ax1.pcolormesh(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)\n\n elif plot_type == 'contourf':\n p1 = ax1.contourf(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)\n\n # Handle y-axis\n plt.ylim([stretch_depth, 0])\n ax1.yaxis.axes.set_yticks(np.arange(stretch_depth,1,100))\n plt.ylabel('Depth [%s]' % depth.attrs['units'])\n\n\n # Remove top plot xtick label\n ax1.xaxis.axes.set_xticklabels([])\n\n # Now the rest ...\n ax2 = plt.subplot(2,1,2)\n if plot_type == 'pcolormesh':\n p2 = ax2.pcolormesh(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)\n\n elif plot_type == 'contourf':\n p2 = ax2.contourf(x,depth,fld,vmin=cmin,vmax=cmax,cmap=cmap)\n\n # Handle y-axis\n plt.ylim([depth.min(), stretch_depth])\n yticks = np.flip(np.arange(2*stretch_depth,depth.min(),-1000))\n ax2.yaxis.axes.set_yticks(yticks)\n plt.ylabel('Depth [%s]' % depth.attrs['units'])\n\n # Reduce space between subplots\n fig.subplots_adjust(hspace=0.05)\n\n # Make a single title\n if title is not None:\n fig.suptitle(title,verticalalignment='top',fontsize=24)\n\n # Make an overyling colorbar\n if show_colorbar:\n fig.subplots_adjust(right=0.83)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p2, cax=cbar_ax, extend=extend_cbar)\n\n plt.show()\n\n return fig,ax1,ax2\n\n\ndef set_colorbar_limits(fld,cmin,cmax):\n \"\"\"If unset, compute colorbar limits based on field max/min values, sequential/divergent\n Determine if colorbar needs to be extended\n\n Parameters\n ----------\n fld : xarray.DataArray\n 2D field to be plotted\n\n Output\n ------\n cmin : double \n colorbar min value\n cmax : double \n colorbar max value\n extend_cbar : string \n flag to colorbar extension\n\n \"\"\"\n\n # handle input\n if (cmin is None) and (cmax is not None):\n raise RuntimeError('Only cmax given, must provide both cmin and cmax')\n elif (cmin is not None) and (cmax is None):\n raise RuntimeError('Only cmin given, must provide both cmin and cmax')\n else:\n # handle colorbar limits accidentally passed as with xarray functions\n if type(cmin) is xr.DataArray:\n cmin = cmin.values()\n elif cmin is not None:\n raise TypeError('Unsure of cmin type: ',type(cmin))\n if type(cmax) is xr.DataArray:\n cmax = cmax.values()\n elif cmax is not None:\n raise TypeError('Unsure of cmax type: ',type(cmax))\n\n # compute fld limits\n fld_min = fld.min(skipna=True).values\n fld_max = fld.max(skipna=True).values\n\n # if cmin/cmax not set, compute\n if (cmin is None) and (cmax is None):\n\n cmin = fld_min\n cmax = fld_max\n\n # determine if divergent colorbar \n # Note: Not making divergent colorbar for temperature\n # in degC because still sequential even though +/-\n if (fld_max*fld_min < 0) and (fld.name is not 'THETA'):\n cmax = np.nanmax(np.abs(fld.values))\n cmin = -cmax\n\n # determine if colorbar needs to be extended\n if (cmin > 
fld_min) and (cmax < fld_max):\n extend_cbar = \"both\"\n elif cmin > fld_min:\n extend_cbar = \"min\"\n elif cmax < fld_max:\n extend_cbar = \"max\"\n else:\n extend_cbar = \"neither\"\n\n return cmin, cmax, extend_cbar\n",
"<docstring token>\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport cartopy as cart\nimport xarray as xr\nimport ecco_v4_py as ecco\n\n\ndef global_and_stereo_map(lat, lon, fld, plot_type='pcolormesh', cmap=\n 'YlOrRd', title=None, cmin=None, cmax=None, dpi=100, show_colorbar=True):\n \"\"\"Generate the Robinson and Arctic/Antarctic plot.\n\n Parameters\n ----------\n lat : xarray.DataArray \n\n lon : xarray.DataArray\n\n fld : xarray.DataArray\n\n plot_type : string, optional\n plot type to use, 'pcolormesh', or 'contourf'\n\n cmap : string or colormap object (TBD)\n\n cmin : double, optional\n minimum value for colorbar\n\n cmax : double, optional\n maximum value for colorbar\n\n dpi : int, optiopnal\n plot resolution in dots (pixels) per inch\n\n title,show_colorbar\n \n figsize?\n\n Output\n ------\n\n \"\"\"\n cmin, cmax, extend_cbar = set_colorbar_limits(fld, cmin, cmax)\n plt.figure(figsize=(12, 6), dpi=dpi)\n fig, ax1, p1, cb1 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 1, 1], projection_type=\n 'robin', show_colorbar=False, cmin=cmin, cmax=cmax, user_lon_0=0)\n fig, ax2, p2, cb2 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 3], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=50,\n user_lon_0=0)\n fig, ax3, p3, cb3 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 4], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=-40,\n user_lon_0=180)\n ax1.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax2.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax3.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n if title is not None:\n fig.suptitle(title, verticalalignment='top', fontsize=24)\n if show_colorbar:\n fig.subplots_adjust(right=0.9)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p3, cax=cbar_ax, extend=extend_cbar)\n return fig, (ax1, ax2, ax3)\n\n\ndef plot_depth_slice(x, depth, fld, stretch_depth=-500, plot_type=\n 'pcolormesh', cmap='YlOrRd', title=None, cmin=None, cmax=None, dpi=100,\n show_colorbar=True):\n \"\"\"2D plot of depth vs some other variable, stretching first 500m of depth.\n\n Parameters\n ----------\n depth : xarray DataArray or numpy array\n depth variable\n x : xarray DataArray or numpy array\n variable for x-axis. 
Likely to be time, latitude, or longitude\n fld : xarray DataArray or numpy array\n 2D field with depth + 1 dim\n stretch_depth : scalar (int or float), optional\n stretch top depth to this limit\n \"\"\"\n if len(x) == fld.shape[0]:\n fld = fld.transpose()\n cmin, cmax, extend_cbar = set_colorbar_limits(fld, cmin, cmax)\n fig = plt.figure(figsize=(12, 6), dpi=dpi)\n ax1 = plt.subplot(2, 1, 1)\n if plot_type == 'pcolormesh':\n p1 = ax1.pcolormesh(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n elif plot_type == 'contourf':\n p1 = ax1.contourf(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n plt.ylim([stretch_depth, 0])\n ax1.yaxis.axes.set_yticks(np.arange(stretch_depth, 1, 100))\n plt.ylabel('Depth [%s]' % depth.attrs['units'])\n ax1.xaxis.axes.set_xticklabels([])\n ax2 = plt.subplot(2, 1, 2)\n if plot_type == 'pcolormesh':\n p2 = ax2.pcolormesh(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n elif plot_type == 'contourf':\n p2 = ax2.contourf(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n plt.ylim([depth.min(), stretch_depth])\n yticks = np.flip(np.arange(2 * stretch_depth, depth.min(), -1000))\n ax2.yaxis.axes.set_yticks(yticks)\n plt.ylabel('Depth [%s]' % depth.attrs['units'])\n fig.subplots_adjust(hspace=0.05)\n if title is not None:\n fig.suptitle(title, verticalalignment='top', fontsize=24)\n if show_colorbar:\n fig.subplots_adjust(right=0.83)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p2, cax=cbar_ax, extend=extend_cbar)\n plt.show()\n return fig, ax1, ax2\n\n\ndef set_colorbar_limits(fld, cmin, cmax):\n \"\"\"If unset, compute colorbar limits based on field max/min values, sequential/divergent\n Determine if colorbar needs to be extended\n\n Parameters\n ----------\n fld : xarray.DataArray\n 2D field to be plotted\n\n Output\n ------\n cmin : double \n colorbar min value\n cmax : double \n colorbar max value\n extend_cbar : string \n flag to colorbar extension\n\n \"\"\"\n if cmin is None and cmax is not None:\n raise RuntimeError('Only cmax given, must provide both cmin and cmax')\n elif cmin is not None and cmax is None:\n raise RuntimeError('Only cmin given, must provide both cmin and cmax')\n else:\n if type(cmin) is xr.DataArray:\n cmin = cmin.values()\n elif cmin is not None:\n raise TypeError('Unsure of cmin type: ', type(cmin))\n if type(cmax) is xr.DataArray:\n cmax = cmax.values()\n elif cmax is not None:\n raise TypeError('Unsure of cmax type: ', type(cmax))\n fld_min = fld.min(skipna=True).values\n fld_max = fld.max(skipna=True).values\n if cmin is None and cmax is None:\n cmin = fld_min\n cmax = fld_max\n if fld_max * fld_min < 0 and fld.name is not 'THETA':\n cmax = np.nanmax(np.abs(fld.values))\n cmin = -cmax\n if cmin > fld_min and cmax < fld_max:\n extend_cbar = 'both'\n elif cmin > fld_min:\n extend_cbar = 'min'\n elif cmax < fld_max:\n extend_cbar = 'max'\n else:\n extend_cbar = 'neither'\n return cmin, cmax, extend_cbar\n",
"<docstring token>\n<import token>\n\n\ndef global_and_stereo_map(lat, lon, fld, plot_type='pcolormesh', cmap=\n 'YlOrRd', title=None, cmin=None, cmax=None, dpi=100, show_colorbar=True):\n \"\"\"Generate the Robinson and Arctic/Antarctic plot.\n\n Parameters\n ----------\n lat : xarray.DataArray \n\n lon : xarray.DataArray\n\n fld : xarray.DataArray\n\n plot_type : string, optional\n plot type to use, 'pcolormesh', or 'contourf'\n\n cmap : string or colormap object (TBD)\n\n cmin : double, optional\n minimum value for colorbar\n\n cmax : double, optional\n maximum value for colorbar\n\n dpi : int, optiopnal\n plot resolution in dots (pixels) per inch\n\n title,show_colorbar\n \n figsize?\n\n Output\n ------\n\n \"\"\"\n cmin, cmax, extend_cbar = set_colorbar_limits(fld, cmin, cmax)\n plt.figure(figsize=(12, 6), dpi=dpi)\n fig, ax1, p1, cb1 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 1, 1], projection_type=\n 'robin', show_colorbar=False, cmin=cmin, cmax=cmax, user_lon_0=0)\n fig, ax2, p2, cb2 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 3], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=50,\n user_lon_0=0)\n fig, ax3, p3, cb3 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 4], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=-40,\n user_lon_0=180)\n ax1.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax2.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax3.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n if title is not None:\n fig.suptitle(title, verticalalignment='top', fontsize=24)\n if show_colorbar:\n fig.subplots_adjust(right=0.9)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p3, cax=cbar_ax, extend=extend_cbar)\n return fig, (ax1, ax2, ax3)\n\n\ndef plot_depth_slice(x, depth, fld, stretch_depth=-500, plot_type=\n 'pcolormesh', cmap='YlOrRd', title=None, cmin=None, cmax=None, dpi=100,\n show_colorbar=True):\n \"\"\"2D plot of depth vs some other variable, stretching first 500m of depth.\n\n Parameters\n ----------\n depth : xarray DataArray or numpy array\n depth variable\n x : xarray DataArray or numpy array\n variable for x-axis. 
Likely to be time, latitude, or longitude\n fld : xarray DataArray or numpy array\n 2D field with depth + 1 dim\n stretch_depth : scalar (int or float), optional\n stretch top depth to this limit\n \"\"\"\n if len(x) == fld.shape[0]:\n fld = fld.transpose()\n cmin, cmax, extend_cbar = set_colorbar_limits(fld, cmin, cmax)\n fig = plt.figure(figsize=(12, 6), dpi=dpi)\n ax1 = plt.subplot(2, 1, 1)\n if plot_type == 'pcolormesh':\n p1 = ax1.pcolormesh(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n elif plot_type == 'contourf':\n p1 = ax1.contourf(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n plt.ylim([stretch_depth, 0])\n ax1.yaxis.axes.set_yticks(np.arange(stretch_depth, 1, 100))\n plt.ylabel('Depth [%s]' % depth.attrs['units'])\n ax1.xaxis.axes.set_xticklabels([])\n ax2 = plt.subplot(2, 1, 2)\n if plot_type == 'pcolormesh':\n p2 = ax2.pcolormesh(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n elif plot_type == 'contourf':\n p2 = ax2.contourf(x, depth, fld, vmin=cmin, vmax=cmax, cmap=cmap)\n plt.ylim([depth.min(), stretch_depth])\n yticks = np.flip(np.arange(2 * stretch_depth, depth.min(), -1000))\n ax2.yaxis.axes.set_yticks(yticks)\n plt.ylabel('Depth [%s]' % depth.attrs['units'])\n fig.subplots_adjust(hspace=0.05)\n if title is not None:\n fig.suptitle(title, verticalalignment='top', fontsize=24)\n if show_colorbar:\n fig.subplots_adjust(right=0.83)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p2, cax=cbar_ax, extend=extend_cbar)\n plt.show()\n return fig, ax1, ax2\n\n\ndef set_colorbar_limits(fld, cmin, cmax):\n \"\"\"If unset, compute colorbar limits based on field max/min values, sequential/divergent\n Determine if colorbar needs to be extended\n\n Parameters\n ----------\n fld : xarray.DataArray\n 2D field to be plotted\n\n Output\n ------\n cmin : double \n colorbar min value\n cmax : double \n colorbar max value\n extend_cbar : string \n flag to colorbar extension\n\n \"\"\"\n if cmin is None and cmax is not None:\n raise RuntimeError('Only cmax given, must provide both cmin and cmax')\n elif cmin is not None and cmax is None:\n raise RuntimeError('Only cmin given, must provide both cmin and cmax')\n else:\n if type(cmin) is xr.DataArray:\n cmin = cmin.values()\n elif cmin is not None:\n raise TypeError('Unsure of cmin type: ', type(cmin))\n if type(cmax) is xr.DataArray:\n cmax = cmax.values()\n elif cmax is not None:\n raise TypeError('Unsure of cmax type: ', type(cmax))\n fld_min = fld.min(skipna=True).values\n fld_max = fld.max(skipna=True).values\n if cmin is None and cmax is None:\n cmin = fld_min\n cmax = fld_max\n if fld_max * fld_min < 0 and fld.name is not 'THETA':\n cmax = np.nanmax(np.abs(fld.values))\n cmin = -cmax\n if cmin > fld_min and cmax < fld_max:\n extend_cbar = 'both'\n elif cmin > fld_min:\n extend_cbar = 'min'\n elif cmax < fld_max:\n extend_cbar = 'max'\n else:\n extend_cbar = 'neither'\n return cmin, cmax, extend_cbar\n",
"<docstring token>\n<import token>\n\n\ndef global_and_stereo_map(lat, lon, fld, plot_type='pcolormesh', cmap=\n 'YlOrRd', title=None, cmin=None, cmax=None, dpi=100, show_colorbar=True):\n \"\"\"Generate the Robinson and Arctic/Antarctic plot.\n\n Parameters\n ----------\n lat : xarray.DataArray \n\n lon : xarray.DataArray\n\n fld : xarray.DataArray\n\n plot_type : string, optional\n plot type to use, 'pcolormesh', or 'contourf'\n\n cmap : string or colormap object (TBD)\n\n cmin : double, optional\n minimum value for colorbar\n\n cmax : double, optional\n maximum value for colorbar\n\n dpi : int, optiopnal\n plot resolution in dots (pixels) per inch\n\n title,show_colorbar\n \n figsize?\n\n Output\n ------\n\n \"\"\"\n cmin, cmax, extend_cbar = set_colorbar_limits(fld, cmin, cmax)\n plt.figure(figsize=(12, 6), dpi=dpi)\n fig, ax1, p1, cb1 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 1, 1], projection_type=\n 'robin', show_colorbar=False, cmin=cmin, cmax=cmax, user_lon_0=0)\n fig, ax2, p2, cb2 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 3], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=50,\n user_lon_0=0)\n fig, ax3, p3, cb3 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 4], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=-40,\n user_lon_0=180)\n ax1.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax2.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax3.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n if title is not None:\n fig.suptitle(title, verticalalignment='top', fontsize=24)\n if show_colorbar:\n fig.subplots_adjust(right=0.9)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p3, cax=cbar_ax, extend=extend_cbar)\n return fig, (ax1, ax2, ax3)\n\n\n<function token>\n\n\ndef set_colorbar_limits(fld, cmin, cmax):\n \"\"\"If unset, compute colorbar limits based on field max/min values, sequential/divergent\n Determine if colorbar needs to be extended\n\n Parameters\n ----------\n fld : xarray.DataArray\n 2D field to be plotted\n\n Output\n ------\n cmin : double \n colorbar min value\n cmax : double \n colorbar max value\n extend_cbar : string \n flag to colorbar extension\n\n \"\"\"\n if cmin is None and cmax is not None:\n raise RuntimeError('Only cmax given, must provide both cmin and cmax')\n elif cmin is not None and cmax is None:\n raise RuntimeError('Only cmin given, must provide both cmin and cmax')\n else:\n if type(cmin) is xr.DataArray:\n cmin = cmin.values()\n elif cmin is not None:\n raise TypeError('Unsure of cmin type: ', type(cmin))\n if type(cmax) is xr.DataArray:\n cmax = cmax.values()\n elif cmax is not None:\n raise TypeError('Unsure of cmax type: ', type(cmax))\n fld_min = fld.min(skipna=True).values\n fld_max = fld.max(skipna=True).values\n if cmin is None and cmax is None:\n cmin = fld_min\n cmax = fld_max\n if fld_max * fld_min < 0 and fld.name is not 'THETA':\n cmax = np.nanmax(np.abs(fld.values))\n cmin = -cmax\n if cmin > fld_min and cmax < fld_max:\n extend_cbar = 'both'\n elif cmin > fld_min:\n extend_cbar = 'min'\n elif cmax < fld_max:\n extend_cbar = 'max'\n else:\n extend_cbar = 'neither'\n return cmin, cmax, extend_cbar\n",
"<docstring token>\n<import token>\n\n\ndef global_and_stereo_map(lat, lon, fld, plot_type='pcolormesh', cmap=\n 'YlOrRd', title=None, cmin=None, cmax=None, dpi=100, show_colorbar=True):\n \"\"\"Generate the Robinson and Arctic/Antarctic plot.\n\n Parameters\n ----------\n lat : xarray.DataArray \n\n lon : xarray.DataArray\n\n fld : xarray.DataArray\n\n plot_type : string, optional\n plot type to use, 'pcolormesh', or 'contourf'\n\n cmap : string or colormap object (TBD)\n\n cmin : double, optional\n minimum value for colorbar\n\n cmax : double, optional\n maximum value for colorbar\n\n dpi : int, optiopnal\n plot resolution in dots (pixels) per inch\n\n title,show_colorbar\n \n figsize?\n\n Output\n ------\n\n \"\"\"\n cmin, cmax, extend_cbar = set_colorbar_limits(fld, cmin, cmax)\n plt.figure(figsize=(12, 6), dpi=dpi)\n fig, ax1, p1, cb1 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 1, 1], projection_type=\n 'robin', show_colorbar=False, cmin=cmin, cmax=cmax, user_lon_0=0)\n fig, ax2, p2, cb2 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 3], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=50,\n user_lon_0=0)\n fig, ax3, p3, cb3 = ecco.plot_proj_to_latlon_grid(lat, lon, fld, cmap=\n cmap, plot_type=plot_type, subplot_grid=[2, 2, 4], projection_type=\n 'stereo', show_colorbar=False, cmin=cmin, cmax=cmax, lat_lim=-40,\n user_lon_0=180)\n ax1.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax2.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n ax3.add_feature(cart.feature.LAND, facecolor='0.7', zorder=2)\n if title is not None:\n fig.suptitle(title, verticalalignment='top', fontsize=24)\n if show_colorbar:\n fig.subplots_adjust(right=0.9)\n cbar_ax = fig.add_axes([0.87, 0.1, 0.025, 0.8])\n fig.colorbar(p3, cax=cbar_ax, extend=extend_cbar)\n return fig, (ax1, ax2, ax3)\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<function token>\n<function token>\n<function token>\n"
] | false |
861 |
bf6d1ddf66bc0d54320c0491e344925a5f507df7
|
import os
import sys
sys.path.insert(0, "/path/to/mm-api/python")
sys.path.insert(0, "/path/to/mm-api/distrib/python_osx")
print(sys.path)
import mmapi
from mmRemote import *
import mm
# assumption: we are running
examples_dir = "/dir/of/models/"
part_filename1 = os.path.join( examples_dir, "model1.stl" )
part_filename2 = os.path.join( examples_dir, "model2.stl" )
# initialize connection
remote = mmRemote()
remote.connect()
cmd = mmapi.StoredCommands()
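# note: this StoredCommands instance is created but never used below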
new_obj1 = mm.append_objects_from_file(remote, part_filename1)
new_obj2 = mm.append_objects_from_file(remote, part_filename2)
#done!
remote.shutdown()
|
[
"import os\nimport sys\nsys.path.insert(0, \"/path/to/mm-api/python\")\nsys.path.insert(0, \"/path/to/mm-api/distrib/python_osx\")\nprint(sys.path)\n\n\nimport mmapi\nfrom mmRemote import *\nimport mm;\n\n# assumption: we are running\nexamples_dir = \"/dir/of/models/\"\npart_filename1 = os.path.join( examples_dir, \"model1.stl\" )\npart_filename2 = os.path.join( examples_dir, \"model2.stl\" )\n\n# initialize connection\nremote = mmRemote()\nremote.connect()\n\ncmd = mmapi.StoredCommands()\n\n\nnew_obj1 = mm.append_objects_from_file(remote, part_filename1);\nnew_obj1 = mm.append_objects_from_file(remote, part_filename2);\n\n#done!\nremote.shutdown()\n",
"import os\nimport sys\nsys.path.insert(0, '/path/to/mm-api/python')\nsys.path.insert(0, '/path/to/mm-api/distrib/python_osx')\nprint(sys.path)\nimport mmapi\nfrom mmRemote import *\nimport mm\nexamples_dir = '/dir/of/models/'\npart_filename1 = os.path.join(examples_dir, 'model1.stl')\npart_filename2 = os.path.join(examples_dir, 'model2.stl')\nremote = mmRemote()\nremote.connect()\ncmd = mmapi.StoredCommands()\nnew_obj1 = mm.append_objects_from_file(remote, part_filename1)\nnew_obj1 = mm.append_objects_from_file(remote, part_filename2)\nremote.shutdown()\n",
"<import token>\nsys.path.insert(0, '/path/to/mm-api/python')\nsys.path.insert(0, '/path/to/mm-api/distrib/python_osx')\nprint(sys.path)\n<import token>\nexamples_dir = '/dir/of/models/'\npart_filename1 = os.path.join(examples_dir, 'model1.stl')\npart_filename2 = os.path.join(examples_dir, 'model2.stl')\nremote = mmRemote()\nremote.connect()\ncmd = mmapi.StoredCommands()\nnew_obj1 = mm.append_objects_from_file(remote, part_filename1)\nnew_obj1 = mm.append_objects_from_file(remote, part_filename2)\nremote.shutdown()\n",
"<import token>\nsys.path.insert(0, '/path/to/mm-api/python')\nsys.path.insert(0, '/path/to/mm-api/distrib/python_osx')\nprint(sys.path)\n<import token>\n<assignment token>\nremote.connect()\n<assignment token>\nremote.shutdown()\n",
"<import token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
862 |
28854823b1edc7df6cf025175811c1858efd2c42
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-19 15:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='OpenHumansMember',
fields=[
('oh_id', models.CharField(max_length=16, primary_key=True, serialize=False, unique=True)),
('access_token', models.CharField(max_length=256)),
('refresh_token', models.CharField(max_length=256)),
('token_expires', models.DateTimeField()),
('seeq_id', models.IntegerField(null=True)),
],
),
]
|
[
"# -*- coding: utf-8 -*-\n# Generated by Django 1.10.4 on 2016-12-19 15:25\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='OpenHumansMember',\n fields=[\n ('oh_id', models.CharField(max_length=16, primary_key=True, serialize=False, unique=True)),\n ('access_token', models.CharField(max_length=256)),\n ('refresh_token', models.CharField(max_length=256)),\n ('token_expires', models.DateTimeField()),\n ('seeq_id', models.IntegerField(null=True)),\n ],\n ),\n ]\n",
"from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='OpenHumansMember', fields=[(\n 'oh_id', models.CharField(max_length=16, primary_key=True,\n serialize=False, unique=True)), ('access_token', models.CharField(\n max_length=256)), ('refresh_token', models.CharField(max_length=256\n )), ('token_expires', models.DateTimeField()), ('seeq_id', models.\n IntegerField(null=True))])]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='OpenHumansMember', fields=[(\n 'oh_id', models.CharField(max_length=16, primary_key=True,\n serialize=False, unique=True)), ('access_token', models.CharField(\n max_length=256)), ('refresh_token', models.CharField(max_length=256\n )), ('token_expires', models.DateTimeField()), ('seeq_id', models.\n IntegerField(null=True))])]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
863 |
e3665141397d52877242463d548c059272d13536
|
#!/usr/bin/env python3
import io
import json
import fire
from collections import OrderedDict
def main(input, output):
vocab = OrderedDict({'</s>': 0, '<unk>': 1})
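    # ids 0 and 1 are reserved for the sentence-end ('</s>') and unknown ('<unk>') markers;
    # each word from the count file then gets the next consecutive id, in file order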
for line in io.open(input, 'r', encoding='utf-8'):
word, count = line.strip().split()
vocab[word] = len(vocab)
with io.open(output, 'w', encoding='utf-8') as out:
json.dump(vocab, out, indent=2, ensure_ascii=False)
if __name__ == '__main__':
fire.Fire(main)
|
[
"#!/usr/bin/env python3\n\nimport io\nimport json\nimport fire\nfrom collections import OrderedDict\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\nif __name__ == '__main__':\n fire.Fire(main)\n",
"import io\nimport json\nimport fire\nfrom collections import OrderedDict\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\nif __name__ == '__main__':\n fire.Fire(main)\n",
"<import token>\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\nif __name__ == '__main__':\n fire.Fire(main)\n",
"<import token>\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\n<code token>\n",
"<import token>\n<function token>\n<code token>\n"
] | false |
864 |
25ff54a969651d365de33f2420c662518dd63738
|
import json
import random
from time import sleep
url = "data/data.json"
def loop(run_state):
error = 1
simulations = 1
    while run_state:
error_margin = str((error/simulations) * 100) + "%"
prediction = get_prediction()
print("Prediction: %s" % prediction)
print("Error Margin: %s" % error_margin)
print("Flip the coin and insert your result:\nh = head\nt = tail")
answer = input()
comparator = ""
        if answer == "h" or answer == "t":
if answer == "t":
write_data(False)
comparator = "tail"
elif answer == "h":
write_data(True)
comparator = "head"
simulations += 1
if comparator != prediction:
error += 1
else:
print("Invalid answer\n")
def get_prediction():
file = read_file()
data = file["coin-result"]
true = 0
for i in data:
if i is True:
true += 1
head = true/len(data)
tail = 1-head
if head + tail == 1:
rand = random.uniform(0.0, 1.0)
if head == 1:
return "head"
elif tail == 1:
return "tail"
elif head > tail:
if rand > head:
return "head"
else:
return "tail"
elif head < tail:
if rand > tail:
return "tail"
else:
return "head"
elif head == tail:
rand = random.randint(0, 1)
if rand == 0:
return "tail"
else:
return "head"
def read_file():
file = open(url, "r")
data = json.loads(file.read())
file.close()
return data
def write_data(value):
data = read_file()
file = open(url, "w")
data["coin-result"].append(value)
json.dump(data, file)
file.close()
def get_answer(answer):
    if answer == "h":
return "head"
elif answer == "t":
return "tail"
else:
print("Invalid answer")
# OnRun
run = True
print("Welcome to CoinPredictor\n")
loop(run)
'''
file = open("data/data.json", "w")
data['coin-result'].append(False)
data = json.dump(data, file)
print(data)
file.close()'''
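# Assumed layout of data/data.json (the file itself is not shown; its shape is
# inferred from read_file() and write_data() above): a single object holding a
# "coin-result" list of booleans, where true records a head and false a tail, e.g.
#
#   {"coin-result": [true, false, true]}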
|
[
"import json\nimport random\nfrom time import sleep\n\nurl = \"data/data.json\"\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n\n while run:\n\n error_margin = str((error/simulations) * 100) + \"%\"\n prediction = get_prediction()\n print(\"Prediction: %s\" % prediction)\n print(\"Error Margin: %s\" % error_margin)\n print(\"Flip the coin and insert your result:\\nh = head\\nt = tail\")\n answer = input()\n comparator = \"\"\n\n if answer is \"h\" or answer is \"t\":\n if answer == \"t\":\n write_data(False)\n comparator = \"tail\"\n\n elif answer == \"h\":\n write_data(True)\n comparator = \"head\"\n\n simulations += 1\n\n if comparator != prediction:\n error += 1\n\n else:\n print(\"Invalid answer\\n\")\n\n\ndef get_prediction():\n file = read_file()\n data = file[\"coin-result\"]\n true = 0\n\n for i in data:\n if i is True:\n true += 1\n\n head = true/len(data)\n tail = 1-head\n\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n\n if head == 1:\n return \"head\"\n\n elif tail == 1:\n return \"tail\"\n\n elif head > tail:\n if rand > head:\n return \"head\"\n else:\n return \"tail\"\n\n elif head < tail:\n if rand > tail:\n return \"tail\"\n else:\n return \"head\"\n\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return \"tail\"\n else:\n return \"head\"\n\n\ndef read_file():\n file = open(url, \"r\")\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, \"w\")\n data[\"coin-result\"].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == \"c\":\n return \"head\"\n elif answer == \"t\":\n return \"tail\"\n else:\n print(\"Invalid answer\")\n\n\n# OnRun\nrun = True\nprint(\"Welcome to CoinPredictor\\n\")\nloop(run)\n\n\n'''\n\nfile = open(\"data/data.json\", \"w\")\ndata['coin-result'].append(False)\ndata = json.dump(data, file)\nprint(data)\nfile.close()'''\n",
"import json\nimport random\nfrom time import sleep\nurl = 'data/data.json'\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\nrun = True\nprint('Welcome to CoinPredictor\\n')\nloop(run)\n<docstring token>\n",
"<import token>\nurl = 'data/data.json'\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\nrun = True\nprint('Welcome to CoinPredictor\\n')\nloop(run)\n<docstring token>\n",
"<import token>\n<assignment token>\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\n<assignment token>\nprint('Welcome to CoinPredictor\\n')\nloop(run)\n<docstring token>\n",
"<import token>\n<assignment token>\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\n<assignment token>\n<code token>\n<docstring token>\n",
"<import token>\n<assignment token>\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n",
"<import token>\n<assignment token>\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n",
"<import token>\n<assignment token>\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\n<function token>\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n"
] | false |
865 |
73d7b1895282df5b744d8c03ec7e6f8530366b76
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
from sklearn import svm
data=np.loadtxt('yucedata1.txt')
X=data[:,0]
y=data[:,1]
plt.figure(1,figsize=(8,6))
myfont = FontProperties(fname=r"c:\windows\fonts\simsun.ttc", size=14)
plt.scatter(X,y,color="red",label="ini_data",linewidth=3)
plt.xlabel(u'Exam1 Score',fontproperties=myfont)
plt.ylabel('Exam2 Score')
plt.legend()
# plt.show()
X=X.reshape(-1,1)
print(X)
clf = svm.SVR(kernel='linear').fit(X, y)
# clf = svm.SVC(kernel='poly',degree=5,gamma=1,coef0=0).fit(X, y)
# clf = svm.SVR(kernel='rbf',C=100,gamma=20).fit(X, y)
'''The larger gamma is, the more polynomial terms are used, which leads to higher variance.'''
# print(u'Accuracy: %.2f' % clf.score(X, y))
X1=np.linspace(0,25,100).reshape(-1,1)
y1=clf.predict(X1)
plt.plot(X1,y1,color="orange",label="Fitting Line",linewidth=2)
plt.show()
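# Assumed format of yucedata1.txt (not included here): two whitespace-separated
# numeric columns per line, read by np.loadtxt above, with column 0 used as the
# single feature X and column 1 as the target y, e.g.
#
#   4.5 12.3
#   9.0 17.8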
|
[
"# -*- coding: utf-8 -*-\r\nimport numpy as np\r\nimport matplotlib as mpl\r\nimport matplotlib.pyplot as plt \r\nfrom matplotlib.font_manager import FontProperties \r\nfrom sklearn import svm\r\n\r\n\r\ndata=np.loadtxt('yucedata1.txt')\r\n\r\nX=data[:,0]\r\ny=data[:,1]\r\n\r\nplt.figure(1,figsize=(8,6))\r\nmyfont = FontProperties(fname=r\"c:\\windows\\fonts\\simsun.ttc\", size=14) \r\nplt.scatter(X,y,color=\"red\",label=\"ini_data\",linewidth=3)\r\nplt.xlabel(u'Exam1 Score',fontproperties=myfont)\r\nplt.ylabel('Exam2 Score')\r\nplt.legend()\r\n\r\n# plt.show()\r\nX=X.reshape(-1,1)\r\nprint X\r\nclf = svm.SVR(kernel='linear').fit(X, y)\r\n# clf = svm.SVC(kernel='poly',degree=5,gamma=1,coef0=0).fit(X, y)\r\n# clf = svm.SVR(kernel='rbf',C=100,gamma=20).fit(X, y)\r\n\r\n'''gamma越大,多项式项数越多,导致高方差'''\r\n\r\n\r\n# print u'精准度为: %.2f' % clf.score(X, y)\r\n\r\nX1=np.linspace(0,25,100).reshape(-1,1)\r\n\r\ny1=clf.predict(X1)\r\n\r\nplt.plot(X1,y1,color=\"orange\",label=\"Fitting Line\",linewidth=2) \r\n\r\n\r\nplt.show()\r\n"
] | true |
866 |
f2cdee7e5eebaeeb784cb901c3ac6301e90ac7b9
|
from django.shortcuts import render, get_object_or_404, redirect
#from emailupdate.forms import emailupdate_form
from .forms import EmailForm
from django.utils import timezone
def index(request):
    if request.method == "POST":
        form = EmailForm(request.POST)
        if form.is_valid():
            post = form.save(commit=False)
            post.signup_date = timezone.now()
            post.email_confirmed = True
            post.save()
            return redirect('/emailupdate/thanks/')
    else:
        form = EmailForm()
    # Render the form for GET requests and re-display it with errors when
    # POST validation fails.
    return render(request, 'emailupdate/emailupdate.html', {
        'form': form,
    })
def thanks(request):
return render(request, 'emailupdate/emailupdate_thanks.html')
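# The imported EmailForm lives in a forms module that is not part of this
# snippet. A minimal sketch of what it is assumed to look like, based on the
# fields set in index() (the EmailSignup model name is hypothetical):
#
# from django import forms
# from .models import EmailSignup
#
# class EmailForm(forms.ModelForm):
#     class Meta:
#         model = EmailSignup
#         fields = ['email']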
|
[
"from django.shortcuts import render, get_object_or_404, redirect\n#from emailupdate.forms import emailupdate_form\nfrom forms import EmailForm\nfrom django.utils import timezone\n\ndef index(request):\n\tif request.method == \"POST\":\n\t\tform = EmailForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tpost = form.save(commit=False)\n\t\t\tpost.signup_date = timezone.now()\n\t\t\tpost.email_confirmed = True\n\t\t\tpost.save()\n\t\t\treturn redirect('/emailupdate/thanks/')\n\telse:\n\t\tform_class = EmailForm\n\t\treturn render(request, 'emailupdate/emailupdate.html', {\n\t\t\t'form': form_class,\n\t\t})\t\n\ndef thanks(request):\n\treturn render(request, 'emailupdate/emailupdate_thanks.html')",
"from django.shortcuts import render, get_object_or_404, redirect\nfrom forms import EmailForm\nfrom django.utils import timezone\n\n\ndef index(request):\n if request.method == 'POST':\n form = EmailForm(request.POST)\n if form.is_valid():\n post = form.save(commit=False)\n post.signup_date = timezone.now()\n post.email_confirmed = True\n post.save()\n return redirect('/emailupdate/thanks/')\n else:\n form_class = EmailForm\n return render(request, 'emailupdate/emailupdate.html', {'form':\n form_class})\n\n\ndef thanks(request):\n return render(request, 'emailupdate/emailupdate_thanks.html')\n",
"<import token>\n\n\ndef index(request):\n if request.method == 'POST':\n form = EmailForm(request.POST)\n if form.is_valid():\n post = form.save(commit=False)\n post.signup_date = timezone.now()\n post.email_confirmed = True\n post.save()\n return redirect('/emailupdate/thanks/')\n else:\n form_class = EmailForm\n return render(request, 'emailupdate/emailupdate.html', {'form':\n form_class})\n\n\ndef thanks(request):\n return render(request, 'emailupdate/emailupdate_thanks.html')\n",
"<import token>\n<function token>\n\n\ndef thanks(request):\n return render(request, 'emailupdate/emailupdate_thanks.html')\n",
"<import token>\n<function token>\n<function token>\n"
] | false |
867 |
18dae039f6455f944cbaa97bcb9c36ed29ac9a21
|
incremental_factors_file = '../2019_2020_IncrementalFactorsList.csv'
tax_pickle_for_apns = 'kmes_taxes.p'
tax_history_pickle = '../cusd_1percent_tax_history.p'
distribution_pickle_out = 'kmes_distribution.p'
cabrillo_key = 50200
def read_incremental_factors():
import csv
inc_file = open(incremental_factors_file, 'r')
reader = csv.reader(inc_file)
increment_map = dict()
funding_code_map = dict()
this_trn_code = ''
for row in reader:
if row[0] != '':
this_trn_code = row[0].replace('-','')
this_trn = increment_map.get(this_trn_code,{})
this_trn[int(row[1])] = float(row[3])
funding_code_map[int(row[1])] = row[2]
increment_map[this_trn_code] = this_trn
return increment_map, funding_code_map
increment_map, funding_code_map = read_incremental_factors()
import pickle as p
tax_data_apns = p.load(open(tax_pickle_for_apns,'rb'))
apns = list(set([d[0] for d in tax_data_apns]))
apns.sort()
tax_distribution = list()
tax_history = p.load(open(tax_history_pickle,'rb'))
tax_history_apns = [d[0] for d in tax_history]
for apn in apns:
try:
tax_history_index = tax_history_apns.index(apn)
    except ValueError:  # apn not present in the tax history list
tax_history_index = None
if tax_history_index is None:
print('No Matching APN: ' + apn)
else:
this_tax_history = tax_history[tax_history_index]
total_tax = this_tax_history[3]
tra = this_tax_history[1]
this_tra = increment_map.get(tra, None)
if this_tra is None:
print('TRA is Null for APN: ' + apn)
else:
fraction = this_tra.get(cabrillo_key, None)
if fraction is None:
print('APN: ' + apn + ' is not in district')
else:
tax_distribution += [[this_tax_history[0], this_tax_history[1], this_tax_history[2], fraction, this_tax_history[3], [t*fraction for t in this_tax_history[3]]]]
import numpy as np
district_data = np.array(np.array([x[5] for x in tax_distribution]))
print('District Contributions: ')
district_sum = np.sum(district_data, axis=0)
year = 2007
for ds in district_sum:
print(str(year) + ": " + str(ds))
year += 1
p.dump([tax_distribution, funding_code_map], open(distribution_pickle_out,'wb'))
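# Assumed data layout, inferred from the indexing above rather than stated in
# the source:
# - each CSV row: [TRA number, fund code, fund description, incremental factor]
# - each tax-history record: [apn, tra, <unused field>, list of yearly taxes]
# The yearly tax list is assumed to start at 2007, matching the year counter
# used when printing the district sums.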
|
[
"incremental_factors_file = '../2019_2020_IncrementalFactorsList.csv'\ntax_pickle_for_apns = 'kmes_taxes.p'\ntax_history_pickle = '../cusd_1percent_tax_history.p'\ndistribution_pickle_out = 'kmes_distribution.p'\ncabrillo_key = 50200\n\ndef read_incremental_factors():\n import csv\n inc_file = open(incremental_factors_file, 'r')\n reader = csv.reader(inc_file)\n increment_map = dict()\n funding_code_map = dict()\n this_trn_code = ''\n for row in reader:\n if row[0] != '':\n this_trn_code = row[0].replace('-','')\n this_trn = increment_map.get(this_trn_code,{})\n this_trn[int(row[1])] = float(row[3])\n funding_code_map[int(row[1])] = row[2]\n increment_map[this_trn_code] = this_trn\n return increment_map, funding_code_map\n\nincrement_map, funding_code_map = read_incremental_factors()\nimport pickle as p\ntax_data_apns = p.load(open(tax_pickle_for_apns,'rb'))\napns = list(set([d[0] for d in tax_data_apns]))\napns.sort()\ntax_distribution = list()\ntax_history = p.load(open(tax_history_pickle,'rb'))\ntax_history_apns = [d[0] for d in tax_history]\n\nfor apn in apns:\n try:\n tax_history_index = tax_history_apns.index(apn)\n except:\n tax_history_index = None\n if tax_history_index is None:\n print('No Matching APN: ' + apn)\n else:\n this_tax_history = tax_history[tax_history_index]\n total_tax = this_tax_history[3]\n tra = this_tax_history[1]\n this_tra = increment_map.get(tra, None)\n if this_tra is None:\n print('TRA is Null for APN: ' + apn)\n else:\n fraction = this_tra.get(cabrillo_key, None)\n if fraction is None:\n print('APN: ' + apn + ' is not in district')\n else:\n tax_distribution += [[this_tax_history[0], this_tax_history[1], this_tax_history[2], fraction, this_tax_history[3], [t*fraction for t in this_tax_history[3]]]]\n\nimport numpy as np\n\ndistrict_data = np.array(np.array([x[5] for x in tax_distribution]))\n\nprint('District Contributions: ')\n\ndistrict_sum = np.sum(district_data, axis=0)\nyear = 2007\nfor ds in district_sum:\n print(str(year) + \": \" + str(ds))\n year += 1\n\np.dump([tax_distribution, funding_code_map], open(distribution_pickle_out,'wb'))\n\n",
"incremental_factors_file = '../2019_2020_IncrementalFactorsList.csv'\ntax_pickle_for_apns = 'kmes_taxes.p'\ntax_history_pickle = '../cusd_1percent_tax_history.p'\ndistribution_pickle_out = 'kmes_distribution.p'\ncabrillo_key = 50200\n\n\ndef read_incremental_factors():\n import csv\n inc_file = open(incremental_factors_file, 'r')\n reader = csv.reader(inc_file)\n increment_map = dict()\n funding_code_map = dict()\n this_trn_code = ''\n for row in reader:\n if row[0] != '':\n this_trn_code = row[0].replace('-', '')\n this_trn = increment_map.get(this_trn_code, {})\n this_trn[int(row[1])] = float(row[3])\n funding_code_map[int(row[1])] = row[2]\n increment_map[this_trn_code] = this_trn\n return increment_map, funding_code_map\n\n\nincrement_map, funding_code_map = read_incremental_factors()\nimport pickle as p\ntax_data_apns = p.load(open(tax_pickle_for_apns, 'rb'))\napns = list(set([d[0] for d in tax_data_apns]))\napns.sort()\ntax_distribution = list()\ntax_history = p.load(open(tax_history_pickle, 'rb'))\ntax_history_apns = [d[0] for d in tax_history]\nfor apn in apns:\n try:\n tax_history_index = tax_history_apns.index(apn)\n except:\n tax_history_index = None\n if tax_history_index is None:\n print('No Matching APN: ' + apn)\n else:\n this_tax_history = tax_history[tax_history_index]\n total_tax = this_tax_history[3]\n tra = this_tax_history[1]\n this_tra = increment_map.get(tra, None)\n if this_tra is None:\n print('TRA is Null for APN: ' + apn)\n else:\n fraction = this_tra.get(cabrillo_key, None)\n if fraction is None:\n print('APN: ' + apn + ' is not in district')\n else:\n tax_distribution += [[this_tax_history[0], this_tax_history\n [1], this_tax_history[2], fraction, this_tax_history[3],\n [(t * fraction) for t in this_tax_history[3]]]]\nimport numpy as np\ndistrict_data = np.array(np.array([x[5] for x in tax_distribution]))\nprint('District Contributions: ')\ndistrict_sum = np.sum(district_data, axis=0)\nyear = 2007\nfor ds in district_sum:\n print(str(year) + ': ' + str(ds))\n year += 1\np.dump([tax_distribution, funding_code_map], open(distribution_pickle_out,\n 'wb'))\n",
"incremental_factors_file = '../2019_2020_IncrementalFactorsList.csv'\ntax_pickle_for_apns = 'kmes_taxes.p'\ntax_history_pickle = '../cusd_1percent_tax_history.p'\ndistribution_pickle_out = 'kmes_distribution.p'\ncabrillo_key = 50200\n\n\ndef read_incremental_factors():\n import csv\n inc_file = open(incremental_factors_file, 'r')\n reader = csv.reader(inc_file)\n increment_map = dict()\n funding_code_map = dict()\n this_trn_code = ''\n for row in reader:\n if row[0] != '':\n this_trn_code = row[0].replace('-', '')\n this_trn = increment_map.get(this_trn_code, {})\n this_trn[int(row[1])] = float(row[3])\n funding_code_map[int(row[1])] = row[2]\n increment_map[this_trn_code] = this_trn\n return increment_map, funding_code_map\n\n\nincrement_map, funding_code_map = read_incremental_factors()\n<import token>\ntax_data_apns = p.load(open(tax_pickle_for_apns, 'rb'))\napns = list(set([d[0] for d in tax_data_apns]))\napns.sort()\ntax_distribution = list()\ntax_history = p.load(open(tax_history_pickle, 'rb'))\ntax_history_apns = [d[0] for d in tax_history]\nfor apn in apns:\n try:\n tax_history_index = tax_history_apns.index(apn)\n except:\n tax_history_index = None\n if tax_history_index is None:\n print('No Matching APN: ' + apn)\n else:\n this_tax_history = tax_history[tax_history_index]\n total_tax = this_tax_history[3]\n tra = this_tax_history[1]\n this_tra = increment_map.get(tra, None)\n if this_tra is None:\n print('TRA is Null for APN: ' + apn)\n else:\n fraction = this_tra.get(cabrillo_key, None)\n if fraction is None:\n print('APN: ' + apn + ' is not in district')\n else:\n tax_distribution += [[this_tax_history[0], this_tax_history\n [1], this_tax_history[2], fraction, this_tax_history[3],\n [(t * fraction) for t in this_tax_history[3]]]]\n<import token>\ndistrict_data = np.array(np.array([x[5] for x in tax_distribution]))\nprint('District Contributions: ')\ndistrict_sum = np.sum(district_data, axis=0)\nyear = 2007\nfor ds in district_sum:\n print(str(year) + ': ' + str(ds))\n year += 1\np.dump([tax_distribution, funding_code_map], open(distribution_pickle_out,\n 'wb'))\n",
"<assignment token>\n\n\ndef read_incremental_factors():\n import csv\n inc_file = open(incremental_factors_file, 'r')\n reader = csv.reader(inc_file)\n increment_map = dict()\n funding_code_map = dict()\n this_trn_code = ''\n for row in reader:\n if row[0] != '':\n this_trn_code = row[0].replace('-', '')\n this_trn = increment_map.get(this_trn_code, {})\n this_trn[int(row[1])] = float(row[3])\n funding_code_map[int(row[1])] = row[2]\n increment_map[this_trn_code] = this_trn\n return increment_map, funding_code_map\n\n\n<assignment token>\n<import token>\n<assignment token>\napns.sort()\n<assignment token>\nfor apn in apns:\n try:\n tax_history_index = tax_history_apns.index(apn)\n except:\n tax_history_index = None\n if tax_history_index is None:\n print('No Matching APN: ' + apn)\n else:\n this_tax_history = tax_history[tax_history_index]\n total_tax = this_tax_history[3]\n tra = this_tax_history[1]\n this_tra = increment_map.get(tra, None)\n if this_tra is None:\n print('TRA is Null for APN: ' + apn)\n else:\n fraction = this_tra.get(cabrillo_key, None)\n if fraction is None:\n print('APN: ' + apn + ' is not in district')\n else:\n tax_distribution += [[this_tax_history[0], this_tax_history\n [1], this_tax_history[2], fraction, this_tax_history[3],\n [(t * fraction) for t in this_tax_history[3]]]]\n<import token>\n<assignment token>\nprint('District Contributions: ')\n<assignment token>\nfor ds in district_sum:\n print(str(year) + ': ' + str(ds))\n year += 1\np.dump([tax_distribution, funding_code_map], open(distribution_pickle_out,\n 'wb'))\n",
"<assignment token>\n\n\ndef read_incremental_factors():\n import csv\n inc_file = open(incremental_factors_file, 'r')\n reader = csv.reader(inc_file)\n increment_map = dict()\n funding_code_map = dict()\n this_trn_code = ''\n for row in reader:\n if row[0] != '':\n this_trn_code = row[0].replace('-', '')\n this_trn = increment_map.get(this_trn_code, {})\n this_trn[int(row[1])] = float(row[3])\n funding_code_map[int(row[1])] = row[2]\n increment_map[this_trn_code] = this_trn\n return increment_map, funding_code_map\n\n\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<assignment token>\n<function token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
868 |
860f77b031c815df40a16669dae8d32af4afa5bf
|
from flask import Flask, jsonify, request, render_template
from werkzeug.utils import secure_filename
import os
from utils import allowed_file, convert_html_to_pdf, convert_doc_to_pdf
app = Flask(__name__)
@app.route('/', methods=['GET'])
def index():
""" Renders Index.html """
try:
return render_template('index.html')
except Exception as e:
print("Exception Occurred", e)
return jsonify({"status": "failed", "message": "Something Went Wrong !!"})
@app.route('/upload', methods=['POST'])
def file_converter():
"""
Function Processing Steps:
    Step-1 : Check the uploaded file's extension; only accepted formats are processed further.
    Step-2 : Save the files into the uploads folder.
    Step-3 : Convert html, doc and docx files to PDF and store them in the converted_files folder.

    Note : If a file is already in PDF format, it is saved directly to the converted_files
    folder without any conversion.
"""
if request.method == "POST":
try:
files = request.files.getlist('file')
print("files", files)
if len(files) > 0:
for data in files:
if allowed_file(data.filename):
filename = secure_filename(data.filename)
extension = filename.split('.')
file_path = os.path.join('static/uploads', filename)
if extension[-1] == 'pdf':
pdf_file_path = os.path.join('static/converted_files', filename)
data.save(pdf_file_path)
else:
data.save(file_path)
if extension[-1] == 'html':
if convert_html_to_pdf(file_path, extension[0]):
print("File Converted to PDF Successfully !!")
else:
raise Exception('Something Went Wrong !')
elif extension[-1] == "docx" or extension[-1] == "doc":
if convert_doc_to_pdf(file_path):
print("File Converted to PDF Successfully !!")
else:
raise Exception('Something Went Wrong !')
return jsonify({"status": "success", "message": "File Uploaded Successfully !!"})
else:
return jsonify({"status": "failed", "message": "Format Not Allowed !!"})
else:
return jsonify({"status": "failed"})
except Exception as e:
print("Exception Occurred", e)
return jsonify({"status": "exception", "message": "Something Went Wrong !!"})
else:
return jsonify({"status": "failed", "message": "Method Not Allowed !"})
if __name__ == '__main__':
app.run(host='0.0.0.0', port=9000)
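# The helpers imported from utils are not part of this snippet. A minimal
# sketch of allowed_file(), with the extension whitelist inferred from the
# handlers above (pdf passthrough plus html/doc/docx conversion):
#
# ALLOWED_EXTENSIONS = {'pdf', 'html', 'doc', 'docx'}
#
# def allowed_file(filename):
#     return '.' in filename and filename.rsplit('.', 1)[-1].lower() in ALLOWED_EXTENSIONS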
|
[
"from flask import Flask, jsonify, request, render_template\nfrom werkzeug import secure_filename\nimport os\n\nfrom utils import allowed_file, convert_html_to_pdf, convert_doc_to_pdf\n\napp = Flask(__name__)\n\n\[email protected]('/', methods=['GET'])\ndef index():\n \"\"\" Renders Index.html \"\"\"\n try:\n return render_template('index.html')\n except Exception as e:\n print(\"Exception Occurred\", e)\n return jsonify({\"status\": \"failed\", \"message\": \"Something Went Wrong !!\"})\n\n\[email protected]('/upload', methods=['POST'])\ndef file_converter():\n \"\"\"\n Function Processing Steps:\n Step-1 : Check uploaded file extension ,if accepted format process further\n Step-2 : Save the files into uploads folder\n Step-3 : Convert the html,doc and docx files into pdf file and stores into converted_files folder\n\n Note : If file is already in pdf format than file will directly save in converted_files\n folder without other action.\n \"\"\"\n if request.method == \"POST\":\n try:\n files = request.files.getlist('file')\n print(\"files\", files)\n if len(files) > 0:\n for data in files:\n if allowed_file(data.filename):\n filename = secure_filename(data.filename)\n extension = filename.split('.')\n file_path = os.path.join('static/uploads', filename)\n\n if extension[-1] == 'pdf':\n pdf_file_path = os.path.join('static/converted_files', filename)\n data.save(pdf_file_path)\n else:\n data.save(file_path)\n\n if extension[-1] == 'html':\n if convert_html_to_pdf(file_path, extension[0]):\n print(\"File Converted to PDF Successfully !!\")\n else:\n raise Exception('Something Went Wrong !')\n\n elif extension[-1] == \"docx\" or extension[-1] == \"doc\":\n if convert_doc_to_pdf(file_path):\n print(\"File Converted to PDF Successfully !!\")\n else:\n raise Exception('Something Went Wrong !')\n return jsonify({\"status\": \"success\", \"message\": \"File Uploaded Successfully !!\"})\n\n else:\n return jsonify({\"status\": \"failed\", \"message\": \"Format Not Allowed !!\"})\n else:\n return jsonify({\"status\": \"failed\"})\n except Exception as e:\n print(\"Exception Occurred\", e)\n return jsonify({\"status\": \"exception\", \"message\": \"Something Went Wrong !!\"})\n else:\n return jsonify({\"status\": \"failed\", \"message\": \"Method Not Allowed !\"})\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=9000)\n",
"from flask import Flask, jsonify, request, render_template\nfrom werkzeug import secure_filename\nimport os\nfrom utils import allowed_file, convert_html_to_pdf, convert_doc_to_pdf\napp = Flask(__name__)\n\n\[email protected]('/', methods=['GET'])\ndef index():\n \"\"\" Renders Index.html \"\"\"\n try:\n return render_template('index.html')\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'failed', 'message':\n 'Something Went Wrong !!'})\n\n\[email protected]('/upload', methods=['POST'])\ndef file_converter():\n \"\"\"\n Function Processing Steps:\n Step-1 : Check uploaded file extension ,if accepted format process further\n Step-2 : Save the files into uploads folder\n Step-3 : Convert the html,doc and docx files into pdf file and stores into converted_files folder\n\n Note : If file is already in pdf format than file will directly save in converted_files\n folder without other action.\n \"\"\"\n if request.method == 'POST':\n try:\n files = request.files.getlist('file')\n print('files', files)\n if len(files) > 0:\n for data in files:\n if allowed_file(data.filename):\n filename = secure_filename(data.filename)\n extension = filename.split('.')\n file_path = os.path.join('static/uploads', filename)\n if extension[-1] == 'pdf':\n pdf_file_path = os.path.join(\n 'static/converted_files', filename)\n data.save(pdf_file_path)\n else:\n data.save(file_path)\n if extension[-1] == 'html':\n if convert_html_to_pdf(file_path, extension[0]):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n elif extension[-1] == 'docx' or extension[-1] == 'doc':\n if convert_doc_to_pdf(file_path):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n return jsonify({'status': 'success', 'message':\n 'File Uploaded Successfully !!'})\n else:\n return jsonify({'status': 'failed', 'message':\n 'Format Not Allowed !!'})\n else:\n return jsonify({'status': 'failed'})\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'exception', 'message':\n 'Something Went Wrong !!'})\n else:\n return jsonify({'status': 'failed', 'message': 'Method Not Allowed !'})\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=9000)\n",
"<import token>\napp = Flask(__name__)\n\n\[email protected]('/', methods=['GET'])\ndef index():\n \"\"\" Renders Index.html \"\"\"\n try:\n return render_template('index.html')\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'failed', 'message':\n 'Something Went Wrong !!'})\n\n\[email protected]('/upload', methods=['POST'])\ndef file_converter():\n \"\"\"\n Function Processing Steps:\n Step-1 : Check uploaded file extension ,if accepted format process further\n Step-2 : Save the files into uploads folder\n Step-3 : Convert the html,doc and docx files into pdf file and stores into converted_files folder\n\n Note : If file is already in pdf format than file will directly save in converted_files\n folder without other action.\n \"\"\"\n if request.method == 'POST':\n try:\n files = request.files.getlist('file')\n print('files', files)\n if len(files) > 0:\n for data in files:\n if allowed_file(data.filename):\n filename = secure_filename(data.filename)\n extension = filename.split('.')\n file_path = os.path.join('static/uploads', filename)\n if extension[-1] == 'pdf':\n pdf_file_path = os.path.join(\n 'static/converted_files', filename)\n data.save(pdf_file_path)\n else:\n data.save(file_path)\n if extension[-1] == 'html':\n if convert_html_to_pdf(file_path, extension[0]):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n elif extension[-1] == 'docx' or extension[-1] == 'doc':\n if convert_doc_to_pdf(file_path):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n return jsonify({'status': 'success', 'message':\n 'File Uploaded Successfully !!'})\n else:\n return jsonify({'status': 'failed', 'message':\n 'Format Not Allowed !!'})\n else:\n return jsonify({'status': 'failed'})\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'exception', 'message':\n 'Something Went Wrong !!'})\n else:\n return jsonify({'status': 'failed', 'message': 'Method Not Allowed !'})\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=9000)\n",
"<import token>\n<assignment token>\n\n\[email protected]('/', methods=['GET'])\ndef index():\n \"\"\" Renders Index.html \"\"\"\n try:\n return render_template('index.html')\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'failed', 'message':\n 'Something Went Wrong !!'})\n\n\[email protected]('/upload', methods=['POST'])\ndef file_converter():\n \"\"\"\n Function Processing Steps:\n Step-1 : Check uploaded file extension ,if accepted format process further\n Step-2 : Save the files into uploads folder\n Step-3 : Convert the html,doc and docx files into pdf file and stores into converted_files folder\n\n Note : If file is already in pdf format than file will directly save in converted_files\n folder without other action.\n \"\"\"\n if request.method == 'POST':\n try:\n files = request.files.getlist('file')\n print('files', files)\n if len(files) > 0:\n for data in files:\n if allowed_file(data.filename):\n filename = secure_filename(data.filename)\n extension = filename.split('.')\n file_path = os.path.join('static/uploads', filename)\n if extension[-1] == 'pdf':\n pdf_file_path = os.path.join(\n 'static/converted_files', filename)\n data.save(pdf_file_path)\n else:\n data.save(file_path)\n if extension[-1] == 'html':\n if convert_html_to_pdf(file_path, extension[0]):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n elif extension[-1] == 'docx' or extension[-1] == 'doc':\n if convert_doc_to_pdf(file_path):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n return jsonify({'status': 'success', 'message':\n 'File Uploaded Successfully !!'})\n else:\n return jsonify({'status': 'failed', 'message':\n 'Format Not Allowed !!'})\n else:\n return jsonify({'status': 'failed'})\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'exception', 'message':\n 'Something Went Wrong !!'})\n else:\n return jsonify({'status': 'failed', 'message': 'Method Not Allowed !'})\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=9000)\n",
"<import token>\n<assignment token>\n\n\[email protected]('/', methods=['GET'])\ndef index():\n \"\"\" Renders Index.html \"\"\"\n try:\n return render_template('index.html')\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'failed', 'message':\n 'Something Went Wrong !!'})\n\n\[email protected]('/upload', methods=['POST'])\ndef file_converter():\n \"\"\"\n Function Processing Steps:\n Step-1 : Check uploaded file extension ,if accepted format process further\n Step-2 : Save the files into uploads folder\n Step-3 : Convert the html,doc and docx files into pdf file and stores into converted_files folder\n\n Note : If file is already in pdf format than file will directly save in converted_files\n folder without other action.\n \"\"\"\n if request.method == 'POST':\n try:\n files = request.files.getlist('file')\n print('files', files)\n if len(files) > 0:\n for data in files:\n if allowed_file(data.filename):\n filename = secure_filename(data.filename)\n extension = filename.split('.')\n file_path = os.path.join('static/uploads', filename)\n if extension[-1] == 'pdf':\n pdf_file_path = os.path.join(\n 'static/converted_files', filename)\n data.save(pdf_file_path)\n else:\n data.save(file_path)\n if extension[-1] == 'html':\n if convert_html_to_pdf(file_path, extension[0]):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n elif extension[-1] == 'docx' or extension[-1] == 'doc':\n if convert_doc_to_pdf(file_path):\n print('File Converted to PDF Successfully !!')\n else:\n raise Exception('Something Went Wrong !')\n return jsonify({'status': 'success', 'message':\n 'File Uploaded Successfully !!'})\n else:\n return jsonify({'status': 'failed', 'message':\n 'Format Not Allowed !!'})\n else:\n return jsonify({'status': 'failed'})\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'exception', 'message':\n 'Something Went Wrong !!'})\n else:\n return jsonify({'status': 'failed', 'message': 'Method Not Allowed !'})\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\[email protected]('/', methods=['GET'])\ndef index():\n \"\"\" Renders Index.html \"\"\"\n try:\n return render_template('index.html')\n except Exception as e:\n print('Exception Occurred', e)\n return jsonify({'status': 'failed', 'message':\n 'Something Went Wrong !!'})\n\n\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<code token>\n"
] | false |
869 |
f080191fec4e56adc4013da74c840817e88caf56
|
import os
import base64
from urllib.parse import urlencode
import json
from flask import Blueprint, request, redirect, jsonify, make_response
import requests
spotify = Blueprint('spotify', __name__)
# Client Keys
SPOTIFY_CLIENT_ID = os.environ.get('SPOTIFY_CLIENT_ID')
SPOTIFY_CLIENT_SECRET = os.environ.get('SPOTIFY_CLIENT_SECRET')
# Spotify URLS
SPOTIFY_AUTH_URL = "https://accounts.spotify.com/authorize"
SPOTIFY_TOKEN_URL = "https://accounts.spotify.com/api/token"
SPOTIFY_API_BASE_URL = "https://api.spotify.com"
API_VERSION = "v1"
SPOTIFY_API_URL = "{}/{}".format(SPOTIFY_API_BASE_URL, API_VERSION)
# Server-side Parameters
CLIENT_SIDE_URL = "http://localhost"
PORT = 8888
REDIRECT_URI = "{}:{}/callback".format(CLIENT_SIDE_URL, PORT)
SCOPE = "playlist-modify-public playlist-modify-private user-read-currently-playing"
STATE = ""
SHOW_DIALOG_BOOL = True
SHOW_DIALOG_STR = str(SHOW_DIALOG_BOOL).lower()
auth_query_parameters = {
"response_type": "code",
"redirect_uri": REDIRECT_URI,
"scope": SCOPE,
# "state": STATE,
# "show_dialog": SHOW_DIALOG_str,
"client_id": SPOTIFY_CLIENT_ID
}
@spotify.route("/login")
def login():
# Auth Step 1: Authorization
url_args = urlencode(auth_query_parameters)
print(url_args)
auth_url = "{}/?{}".format(SPOTIFY_AUTH_URL, url_args)
return redirect(auth_url)
@spotify.route("/callback")
def callback():
# Auth Step 4: Requests refresh and access tokens
auth_code = request.args['code']
code_payload = {
"grant_type": "authorization_code",
"code": str(auth_code),
"redirect_uri": REDIRECT_URI
}
base64encoded = base64.b64encode(bytes("{}:{}".format(SPOTIFY_CLIENT_ID, SPOTIFY_CLIENT_SECRET), 'utf-8'))
headers = {"Authorization": "Basic {}".format(base64encoded.decode('utf-8'))}
post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload, headers=headers)
# Auth Step 5: Tokens are Returned to Application
response_data = json.loads(post_request.text)
access_token = response_data["access_token"]
print(access_token)
refresh_token = response_data["refresh_token"]
token_type = response_data["token_type"]
expires_in = response_data["expires_in"]
redirect_to_index = redirect("http://localhost:3000/")
response = make_response(redirect_to_index)
response.set_cookie('access_token', value=access_token)
response.set_cookie('refresh_token', value=refresh_token)
return response
@spotify.route("/refresh_token", methods=['POST'])
def refresh_token():
# 7. Requesting access token from refresh token
r = request.get_json()
refresh_token = r['refresh_token']
code_payload = {
"grant_type": "refresh_token",
"refresh_token": refresh_token
}
base64encoded = base64.b64encode(bytes("{}:{}".format(SPOTIFY_CLIENT_ID, SPOTIFY_CLIENT_SECRET), 'utf-8'))
headers = {"Authorization": "Basic {}".format(base64encoded.decode('utf-8'))}
post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload, headers=headers)
response_data = json.loads(post_request.text)
return jsonify(response_data)
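# Usage sketch for the refresh endpoint (assumption: the blueprint is registered
# without a URL prefix on the app serving port 8888, and the client reads the
# refresh_token cookie set in callback()):
#
#   curl -X POST http://localhost:8888/refresh_token \
#        -H 'Content-Type: application/json' \
#        -d '{"refresh_token": "<value of the refresh_token cookie>"}'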
|
[
"import os\nimport base64\nfrom urllib.parse import urlencode\nimport json\n\nfrom flask import Blueprint, request, redirect, jsonify, make_response\nimport requests\n\nspotify = Blueprint('spotify', __name__)\n\n# Client Keys\nSPOTIFY_CLIENT_ID = os.environ.get('SPOTIFY_CLIENT_ID')\nSPOTIFY_CLIENT_SECRET = os.environ.get('SPOTIFY_CLIENT_SECRET')\n\n# Spotify URLS\nSPOTIFY_AUTH_URL = \"https://accounts.spotify.com/authorize\"\nSPOTIFY_TOKEN_URL = \"https://accounts.spotify.com/api/token\"\nSPOTIFY_API_BASE_URL = \"https://api.spotify.com\"\nAPI_VERSION = \"v1\"\nSPOTIFY_API_URL = \"{}/{}\".format(SPOTIFY_API_BASE_URL, API_VERSION)\n\n# Server-side Parameters\nCLIENT_SIDE_URL = \"http://localhost\"\nPORT = 8888\nREDIRECT_URI = \"{}:{}/callback\".format(CLIENT_SIDE_URL, PORT)\nSCOPE = \"playlist-modify-public playlist-modify-private user-read-currently-playing\"\nSTATE = \"\"\nSHOW_DIALOG_BOOL = True\nSHOW_DIALOG_STR = str(SHOW_DIALOG_BOOL).lower()\n\n\nauth_query_parameters = {\n \"response_type\": \"code\",\n \"redirect_uri\": REDIRECT_URI,\n \"scope\": SCOPE,\n # \"state\": STATE,\n # \"show_dialog\": SHOW_DIALOG_str,\n \"client_id\": SPOTIFY_CLIENT_ID\n}\n\n\[email protected](\"/login\")\ndef login():\n # Auth Step 1: Authorization\n url_args = urlencode(auth_query_parameters)\n print(url_args)\n auth_url = \"{}/?{}\".format(SPOTIFY_AUTH_URL, url_args)\n return redirect(auth_url)\n\n\[email protected](\"/callback\")\ndef callback():\n # Auth Step 4: Requests refresh and access tokens\n auth_code = request.args['code']\n code_payload = {\n \"grant_type\": \"authorization_code\",\n \"code\": str(auth_code),\n \"redirect_uri\": REDIRECT_URI\n }\n\n base64encoded = base64.b64encode(bytes(\"{}:{}\".format(SPOTIFY_CLIENT_ID, SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {\"Authorization\": \"Basic {}\".format(base64encoded.decode('utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload, headers=headers)\n\n # Auth Step 5: Tokens are Returned to Application\n response_data = json.loads(post_request.text)\n access_token = response_data[\"access_token\"]\n print(access_token)\n refresh_token = response_data[\"refresh_token\"]\n token_type = response_data[\"token_type\"]\n expires_in = response_data[\"expires_in\"]\n\n redirect_to_index = redirect(\"http://localhost:3000/\")\n response = make_response(redirect_to_index)\n response.set_cookie('access_token', value=access_token)\n response.set_cookie('refresh_token', value=refresh_token)\n return response\n\n\[email protected](\"/refresh_token\", methods=['POST'])\ndef refresh_token():\n # 7. Requesting access token from refresh token\n r = request.get_json()\n refresh_token = r['refresh_token']\n code_payload = {\n \"grant_type\": \"refresh_token\",\n \"refresh_token\": refresh_token\n }\n base64encoded = base64.b64encode(bytes(\"{}:{}\".format(SPOTIFY_CLIENT_ID, SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {\"Authorization\": \"Basic {}\".format(base64encoded.decode('utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload, headers=headers)\n response_data = json.loads(post_request.text)\n return jsonify(response_data)\n",
"import os\nimport base64\nfrom urllib.parse import urlencode\nimport json\nfrom flask import Blueprint, request, redirect, jsonify, make_response\nimport requests\nspotify = Blueprint('spotify', __name__)\nSPOTIFY_CLIENT_ID = os.environ.get('SPOTIFY_CLIENT_ID')\nSPOTIFY_CLIENT_SECRET = os.environ.get('SPOTIFY_CLIENT_SECRET')\nSPOTIFY_AUTH_URL = 'https://accounts.spotify.com/authorize'\nSPOTIFY_TOKEN_URL = 'https://accounts.spotify.com/api/token'\nSPOTIFY_API_BASE_URL = 'https://api.spotify.com'\nAPI_VERSION = 'v1'\nSPOTIFY_API_URL = '{}/{}'.format(SPOTIFY_API_BASE_URL, API_VERSION)\nCLIENT_SIDE_URL = 'http://localhost'\nPORT = 8888\nREDIRECT_URI = '{}:{}/callback'.format(CLIENT_SIDE_URL, PORT)\nSCOPE = (\n 'playlist-modify-public playlist-modify-private user-read-currently-playing'\n )\nSTATE = ''\nSHOW_DIALOG_BOOL = True\nSHOW_DIALOG_STR = str(SHOW_DIALOG_BOOL).lower()\nauth_query_parameters = {'response_type': 'code', 'redirect_uri':\n REDIRECT_URI, 'scope': SCOPE, 'client_id': SPOTIFY_CLIENT_ID}\n\n\[email protected]('/login')\ndef login():\n url_args = urlencode(auth_query_parameters)\n print(url_args)\n auth_url = '{}/?{}'.format(SPOTIFY_AUTH_URL, url_args)\n return redirect(auth_url)\n\n\[email protected]('/callback')\ndef callback():\n auth_code = request.args['code']\n code_payload = {'grant_type': 'authorization_code', 'code': str(\n auth_code), 'redirect_uri': REDIRECT_URI}\n base64encoded = base64.b64encode(bytes('{}:{}'.format(SPOTIFY_CLIENT_ID,\n SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {'Authorization': 'Basic {}'.format(base64encoded.decode(\n 'utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload,\n headers=headers)\n response_data = json.loads(post_request.text)\n access_token = response_data['access_token']\n print(access_token)\n refresh_token = response_data['refresh_token']\n token_type = response_data['token_type']\n expires_in = response_data['expires_in']\n redirect_to_index = redirect('http://localhost:3000/')\n response = make_response(redirect_to_index)\n response.set_cookie('access_token', value=access_token)\n response.set_cookie('refresh_token', value=refresh_token)\n return response\n\n\[email protected]('/refresh_token', methods=['POST'])\ndef refresh_token():\n r = request.get_json()\n refresh_token = r['refresh_token']\n code_payload = {'grant_type': 'refresh_token', 'refresh_token':\n refresh_token}\n base64encoded = base64.b64encode(bytes('{}:{}'.format(SPOTIFY_CLIENT_ID,\n SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {'Authorization': 'Basic {}'.format(base64encoded.decode(\n 'utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload,\n headers=headers)\n response_data = json.loads(post_request.text)\n return jsonify(response_data)\n",
"<import token>\nspotify = Blueprint('spotify', __name__)\nSPOTIFY_CLIENT_ID = os.environ.get('SPOTIFY_CLIENT_ID')\nSPOTIFY_CLIENT_SECRET = os.environ.get('SPOTIFY_CLIENT_SECRET')\nSPOTIFY_AUTH_URL = 'https://accounts.spotify.com/authorize'\nSPOTIFY_TOKEN_URL = 'https://accounts.spotify.com/api/token'\nSPOTIFY_API_BASE_URL = 'https://api.spotify.com'\nAPI_VERSION = 'v1'\nSPOTIFY_API_URL = '{}/{}'.format(SPOTIFY_API_BASE_URL, API_VERSION)\nCLIENT_SIDE_URL = 'http://localhost'\nPORT = 8888\nREDIRECT_URI = '{}:{}/callback'.format(CLIENT_SIDE_URL, PORT)\nSCOPE = (\n 'playlist-modify-public playlist-modify-private user-read-currently-playing'\n )\nSTATE = ''\nSHOW_DIALOG_BOOL = True\nSHOW_DIALOG_STR = str(SHOW_DIALOG_BOOL).lower()\nauth_query_parameters = {'response_type': 'code', 'redirect_uri':\n REDIRECT_URI, 'scope': SCOPE, 'client_id': SPOTIFY_CLIENT_ID}\n\n\[email protected]('/login')\ndef login():\n url_args = urlencode(auth_query_parameters)\n print(url_args)\n auth_url = '{}/?{}'.format(SPOTIFY_AUTH_URL, url_args)\n return redirect(auth_url)\n\n\[email protected]('/callback')\ndef callback():\n auth_code = request.args['code']\n code_payload = {'grant_type': 'authorization_code', 'code': str(\n auth_code), 'redirect_uri': REDIRECT_URI}\n base64encoded = base64.b64encode(bytes('{}:{}'.format(SPOTIFY_CLIENT_ID,\n SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {'Authorization': 'Basic {}'.format(base64encoded.decode(\n 'utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload,\n headers=headers)\n response_data = json.loads(post_request.text)\n access_token = response_data['access_token']\n print(access_token)\n refresh_token = response_data['refresh_token']\n token_type = response_data['token_type']\n expires_in = response_data['expires_in']\n redirect_to_index = redirect('http://localhost:3000/')\n response = make_response(redirect_to_index)\n response.set_cookie('access_token', value=access_token)\n response.set_cookie('refresh_token', value=refresh_token)\n return response\n\n\[email protected]('/refresh_token', methods=['POST'])\ndef refresh_token():\n r = request.get_json()\n refresh_token = r['refresh_token']\n code_payload = {'grant_type': 'refresh_token', 'refresh_token':\n refresh_token}\n base64encoded = base64.b64encode(bytes('{}:{}'.format(SPOTIFY_CLIENT_ID,\n SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {'Authorization': 'Basic {}'.format(base64encoded.decode(\n 'utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload,\n headers=headers)\n response_data = json.loads(post_request.text)\n return jsonify(response_data)\n",
"<import token>\n<assignment token>\n\n\[email protected]('/login')\ndef login():\n url_args = urlencode(auth_query_parameters)\n print(url_args)\n auth_url = '{}/?{}'.format(SPOTIFY_AUTH_URL, url_args)\n return redirect(auth_url)\n\n\[email protected]('/callback')\ndef callback():\n auth_code = request.args['code']\n code_payload = {'grant_type': 'authorization_code', 'code': str(\n auth_code), 'redirect_uri': REDIRECT_URI}\n base64encoded = base64.b64encode(bytes('{}:{}'.format(SPOTIFY_CLIENT_ID,\n SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {'Authorization': 'Basic {}'.format(base64encoded.decode(\n 'utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload,\n headers=headers)\n response_data = json.loads(post_request.text)\n access_token = response_data['access_token']\n print(access_token)\n refresh_token = response_data['refresh_token']\n token_type = response_data['token_type']\n expires_in = response_data['expires_in']\n redirect_to_index = redirect('http://localhost:3000/')\n response = make_response(redirect_to_index)\n response.set_cookie('access_token', value=access_token)\n response.set_cookie('refresh_token', value=refresh_token)\n return response\n\n\[email protected]('/refresh_token', methods=['POST'])\ndef refresh_token():\n r = request.get_json()\n refresh_token = r['refresh_token']\n code_payload = {'grant_type': 'refresh_token', 'refresh_token':\n refresh_token}\n base64encoded = base64.b64encode(bytes('{}:{}'.format(SPOTIFY_CLIENT_ID,\n SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {'Authorization': 'Basic {}'.format(base64encoded.decode(\n 'utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload,\n headers=headers)\n response_data = json.loads(post_request.text)\n return jsonify(response_data)\n",
"<import token>\n<assignment token>\n\n\[email protected]('/login')\ndef login():\n url_args = urlencode(auth_query_parameters)\n print(url_args)\n auth_url = '{}/?{}'.format(SPOTIFY_AUTH_URL, url_args)\n return redirect(auth_url)\n\n\n<function token>\n\n\[email protected]('/refresh_token', methods=['POST'])\ndef refresh_token():\n r = request.get_json()\n refresh_token = r['refresh_token']\n code_payload = {'grant_type': 'refresh_token', 'refresh_token':\n refresh_token}\n base64encoded = base64.b64encode(bytes('{}:{}'.format(SPOTIFY_CLIENT_ID,\n SPOTIFY_CLIENT_SECRET), 'utf-8'))\n headers = {'Authorization': 'Basic {}'.format(base64encoded.decode(\n 'utf-8'))}\n post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload,\n headers=headers)\n response_data = json.loads(post_request.text)\n return jsonify(response_data)\n",
"<import token>\n<assignment token>\n\n\[email protected]('/login')\ndef login():\n url_args = urlencode(auth_query_parameters)\n print(url_args)\n auth_url = '{}/?{}'.format(SPOTIFY_AUTH_URL, url_args)\n return redirect(auth_url)\n\n\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n"
] | false |
870 |
d4d19411f0c48ffb99bd17e8387f1741144e43b4
|
from celery import shared_task
import tweepy
from datetime import datetime, timedelta
from .models import Tweet
from django.db import IntegrityError
CONSUMER_KEY = 'Vp7FVQLSwESvE9oTQruw0TnhW'
CONSUMER_SECRET = 'miy6EsGklNYxAaVn37vTjAVGwP0c67IOyuY71AAyL1p2Ba4VPN'
ACCESS_TOKEN = '1952022900-5WAHk6l5d3GllFtqDPaucSpnraIokE6hU7aBxNJ'
ACCESS_TOKEN_SECRET = 'ekONOf6QxJG6Lq3k2kznfQ16x12BGm909wckYFcP8SlYZ'
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
@shared_task(name='clean_tweetdb')
def clean_tweetdb():
    """Delete tweets that are more than eight days old."""
    for tweet in Tweet.objects.all():
        if tweet.tweet_date <= datetime.now() - timedelta(days=8):
            tweet.delete()
@shared_task(name='get_tweets')
def get_tweets():
"""Get some tweets from the twitter api and store them to the db."""
clean_tweetdb.delay()
    db_tweets = Tweet.objects.all()
    # guard against an empty table: min() on an empty list raises ValueError
    max_id = min([tweet.tweet_id for tweet in db_tweets]) if db_tweets.exists() else None
tweets = api.search(
q='#python',
max_id=max_id,
count=100
)
tweets_id = [tweet.id for tweet in tweets]
tweets_date = [tweet.created_at for tweet in tweets]
tweets_source = [tweet.source for tweet in tweets]
tweets_favorite_cnt = [tweet.favorite_count for tweet in tweets]
tweets_retweet_cnt = [tweet.retweet_count for tweet in tweets]
tweets_text = [tweet.text for tweet in tweets]
for i, j, k, l, m, n in zip(
tweets_id,
tweets_date,
tweets_source,
tweets_favorite_cnt,
tweets_retweet_cnt,
tweets_text,
):
try:
Tweet.objects.create(
tweet_id=i,
tweet_date=j,
tweet_source=k,
tweet_favorite_cnt=l,
tweet_retweet_cnt=m,
tweet_text=n,
)
except IntegrityError:
pass
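Neither task is triggered by anything in this record; a minimal sketch of a Celery beat schedule that would run them periodically (assuming a Celery application instance named app, which is not shown here) might look like this:
from celery.schedules import crontab

# Hypothetical wiring, not part of the original record: run get_tweets every
# 15 minutes and clean_tweetdb once a night. The task names match the name=
# arguments of the @shared_task decorators above.
app.conf.beat_schedule = {
    'get-tweets-every-15-minutes': {
        'task': 'get_tweets',
        'schedule': crontab(minute='*/15'),
    },
    'clean-tweetdb-nightly': {
        'task': 'clean_tweetdb',
        'schedule': crontab(hour=3, minute=0),
    },
}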
|
[
"from celery import shared_task\nimport tweepy\nfrom datetime import datetime, timedelta\n\nfrom .models import Tweet\nfrom django.db import IntegrityError\n\nCONSUMER_KEY = 'Vp7FVQLSwESvE9oTQruw0TnhW'\nCONSUMER_SECRET = 'miy6EsGklNYxAaVn37vTjAVGwP0c67IOyuY71AAyL1p2Ba4VPN'\nACCESS_TOKEN = '1952022900-5WAHk6l5d3GllFtqDPaucSpnraIokE6hU7aBxNJ'\nACCESS_TOKEN_SECRET = 'ekONOf6QxJG6Lq3k2kznfQ16x12BGm909wckYFcP8SlYZ'\n\nauth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)\nauth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)\n\napi = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)\n\n\n@shared_task(name='clean_tweetdb')\ndef clean_tweetdb():\n tweets = Tweet.objects.all()\n for tweets.tweet_date in tweets:\n if tweets.tweet_date <= datetime.now() - timedelta(days=8):\n tweets.delet()\n\n\n@shared_task(name='get_tweets')\ndef get_tweets():\n \"\"\"Get some tweets from the twitter api and store them to the db.\"\"\"\n clean_tweetdb.delay()\n db_tweets = Tweet.objects.all()\n max_id = min([tweet.tweet_id for tweet in db_tweets])\n tweets = api.search(\n q='#python',\n max_id=max_id,\n count=100\n )\n tweets_id = [tweet.id for tweet in tweets]\n tweets_date = [tweet.created_at for tweet in tweets]\n tweets_source = [tweet.source for tweet in tweets]\n tweets_favorite_cnt = [tweet.favorite_count for tweet in tweets]\n tweets_retweet_cnt = [tweet.retweet_count for tweet in tweets]\n tweets_text = [tweet.text for tweet in tweets]\n\n for i, j, k, l, m, n in zip(\n tweets_id,\n tweets_date,\n tweets_source,\n tweets_favorite_cnt,\n tweets_retweet_cnt,\n tweets_text,\n ):\n try:\n Tweet.objects.create(\n tweet_id=i,\n tweet_date=j,\n tweet_source=k,\n tweet_favorite_cnt=l,\n tweet_retweet_cnt=m,\n tweet_text=n,\n )\n except IntegrityError:\n pass\n",
"from celery import shared_task\nimport tweepy\nfrom datetime import datetime, timedelta\nfrom .models import Tweet\nfrom django.db import IntegrityError\nCONSUMER_KEY = 'Vp7FVQLSwESvE9oTQruw0TnhW'\nCONSUMER_SECRET = 'miy6EsGklNYxAaVn37vTjAVGwP0c67IOyuY71AAyL1p2Ba4VPN'\nACCESS_TOKEN = '1952022900-5WAHk6l5d3GllFtqDPaucSpnraIokE6hU7aBxNJ'\nACCESS_TOKEN_SECRET = 'ekONOf6QxJG6Lq3k2kznfQ16x12BGm909wckYFcP8SlYZ'\nauth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)\nauth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)\napi = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)\n\n\n@shared_task(name='clean_tweetdb')\ndef clean_tweetdb():\n tweets = Tweet.objects.all()\n for tweets.tweet_date in tweets:\n if tweets.tweet_date <= datetime.now() - timedelta(days=8):\n tweets.delet()\n\n\n@shared_task(name='get_tweets')\ndef get_tweets():\n \"\"\"Get some tweets from the twitter api and store them to the db.\"\"\"\n clean_tweetdb.delay()\n db_tweets = Tweet.objects.all()\n max_id = min([tweet.tweet_id for tweet in db_tweets])\n tweets = api.search(q='#python', max_id=max_id, count=100)\n tweets_id = [tweet.id for tweet in tweets]\n tweets_date = [tweet.created_at for tweet in tweets]\n tweets_source = [tweet.source for tweet in tweets]\n tweets_favorite_cnt = [tweet.favorite_count for tweet in tweets]\n tweets_retweet_cnt = [tweet.retweet_count for tweet in tweets]\n tweets_text = [tweet.text for tweet in tweets]\n for i, j, k, l, m, n in zip(tweets_id, tweets_date, tweets_source,\n tweets_favorite_cnt, tweets_retweet_cnt, tweets_text):\n try:\n Tweet.objects.create(tweet_id=i, tweet_date=j, tweet_source=k,\n tweet_favorite_cnt=l, tweet_retweet_cnt=m, tweet_text=n)\n except IntegrityError:\n pass\n",
"<import token>\nCONSUMER_KEY = 'Vp7FVQLSwESvE9oTQruw0TnhW'\nCONSUMER_SECRET = 'miy6EsGklNYxAaVn37vTjAVGwP0c67IOyuY71AAyL1p2Ba4VPN'\nACCESS_TOKEN = '1952022900-5WAHk6l5d3GllFtqDPaucSpnraIokE6hU7aBxNJ'\nACCESS_TOKEN_SECRET = 'ekONOf6QxJG6Lq3k2kznfQ16x12BGm909wckYFcP8SlYZ'\nauth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)\nauth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)\napi = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)\n\n\n@shared_task(name='clean_tweetdb')\ndef clean_tweetdb():\n tweets = Tweet.objects.all()\n for tweets.tweet_date in tweets:\n if tweets.tweet_date <= datetime.now() - timedelta(days=8):\n tweets.delet()\n\n\n@shared_task(name='get_tweets')\ndef get_tweets():\n \"\"\"Get some tweets from the twitter api and store them to the db.\"\"\"\n clean_tweetdb.delay()\n db_tweets = Tweet.objects.all()\n max_id = min([tweet.tweet_id for tweet in db_tweets])\n tweets = api.search(q='#python', max_id=max_id, count=100)\n tweets_id = [tweet.id for tweet in tweets]\n tweets_date = [tweet.created_at for tweet in tweets]\n tweets_source = [tweet.source for tweet in tweets]\n tweets_favorite_cnt = [tweet.favorite_count for tweet in tweets]\n tweets_retweet_cnt = [tweet.retweet_count for tweet in tweets]\n tweets_text = [tweet.text for tweet in tweets]\n for i, j, k, l, m, n in zip(tweets_id, tweets_date, tweets_source,\n tweets_favorite_cnt, tweets_retweet_cnt, tweets_text):\n try:\n Tweet.objects.create(tweet_id=i, tweet_date=j, tweet_source=k,\n tweet_favorite_cnt=l, tweet_retweet_cnt=m, tweet_text=n)\n except IntegrityError:\n pass\n",
"<import token>\n<assignment token>\nauth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)\n<assignment token>\n\n\n@shared_task(name='clean_tweetdb')\ndef clean_tweetdb():\n tweets = Tweet.objects.all()\n for tweets.tweet_date in tweets:\n if tweets.tweet_date <= datetime.now() - timedelta(days=8):\n tweets.delet()\n\n\n@shared_task(name='get_tweets')\ndef get_tweets():\n \"\"\"Get some tweets from the twitter api and store them to the db.\"\"\"\n clean_tweetdb.delay()\n db_tweets = Tweet.objects.all()\n max_id = min([tweet.tweet_id for tweet in db_tweets])\n tweets = api.search(q='#python', max_id=max_id, count=100)\n tweets_id = [tweet.id for tweet in tweets]\n tweets_date = [tweet.created_at for tweet in tweets]\n tweets_source = [tweet.source for tweet in tweets]\n tweets_favorite_cnt = [tweet.favorite_count for tweet in tweets]\n tweets_retweet_cnt = [tweet.retweet_count for tweet in tweets]\n tweets_text = [tweet.text for tweet in tweets]\n for i, j, k, l, m, n in zip(tweets_id, tweets_date, tweets_source,\n tweets_favorite_cnt, tweets_retweet_cnt, tweets_text):\n try:\n Tweet.objects.create(tweet_id=i, tweet_date=j, tweet_source=k,\n tweet_favorite_cnt=l, tweet_retweet_cnt=m, tweet_text=n)\n except IntegrityError:\n pass\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@shared_task(name='clean_tweetdb')\ndef clean_tweetdb():\n tweets = Tweet.objects.all()\n for tweets.tweet_date in tweets:\n if tweets.tweet_date <= datetime.now() - timedelta(days=8):\n tweets.delet()\n\n\n@shared_task(name='get_tweets')\ndef get_tweets():\n \"\"\"Get some tweets from the twitter api and store them to the db.\"\"\"\n clean_tweetdb.delay()\n db_tweets = Tweet.objects.all()\n max_id = min([tweet.tweet_id for tweet in db_tweets])\n tweets = api.search(q='#python', max_id=max_id, count=100)\n tweets_id = [tweet.id for tweet in tweets]\n tweets_date = [tweet.created_at for tweet in tweets]\n tweets_source = [tweet.source for tweet in tweets]\n tweets_favorite_cnt = [tweet.favorite_count for tweet in tweets]\n tweets_retweet_cnt = [tweet.retweet_count for tweet in tweets]\n tweets_text = [tweet.text for tweet in tweets]\n for i, j, k, l, m, n in zip(tweets_id, tweets_date, tweets_source,\n tweets_favorite_cnt, tweets_retweet_cnt, tweets_text):\n try:\n Tweet.objects.create(tweet_id=i, tweet_date=j, tweet_source=k,\n tweet_favorite_cnt=l, tweet_retweet_cnt=m, tweet_text=n)\n except IntegrityError:\n pass\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n\n\n@shared_task(name='get_tweets')\ndef get_tweets():\n \"\"\"Get some tweets from the twitter api and store them to the db.\"\"\"\n clean_tweetdb.delay()\n db_tweets = Tweet.objects.all()\n max_id = min([tweet.tweet_id for tweet in db_tweets])\n tweets = api.search(q='#python', max_id=max_id, count=100)\n tweets_id = [tweet.id for tweet in tweets]\n tweets_date = [tweet.created_at for tweet in tweets]\n tweets_source = [tweet.source for tweet in tweets]\n tweets_favorite_cnt = [tweet.favorite_count for tweet in tweets]\n tweets_retweet_cnt = [tweet.retweet_count for tweet in tweets]\n tweets_text = [tweet.text for tweet in tweets]\n for i, j, k, l, m, n in zip(tweets_id, tweets_date, tweets_source,\n tweets_favorite_cnt, tweets_retweet_cnt, tweets_text):\n try:\n Tweet.objects.create(tweet_id=i, tweet_date=j, tweet_source=k,\n tweet_favorite_cnt=l, tweet_retweet_cnt=m, tweet_text=n)\n except IntegrityError:\n pass\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n"
] | false |
871 |
879482e4df9c3d7f32d9b2a883201ae043e1189f
|
import os
import json
from nltk.corpus import wordnet as wn
from itertools import combinations  # used to enumerate pairwise label combinations
# categories the posts are scored against
myTypes = ['animal', 'vehicle', 'food', 'fashion', 'dog', 'cat', 'car', 'motorcycle']
# directory where the computed influencer scores are written
scorePath = "..\\data\\score"
# JSON file of influencer post data produced by getUsersData.py, used to compute the scores
usersDataFile = "..\\data\\usersData.json"
with open(usersDataFile, 'r') as load_f:
usersData = json.load(load_f)
def get_similar_words(words):
words = [w.lower() for w in words]
if len(words) > 1:
maxScore = 0
firstWord = ''
secondWord = ''
        labelCom = list(combinations(words, 2))  # every pairwise combination of the labels
        for i in labelCom:
            labelMean1 = wn.synsets(i[0])  # synsets (word senses) for each word of the pair
            labelMean2 = wn.synsets(i[1])
            for j in labelMean1:
                for k in labelMean2:
                    if j.wup_similarity(k) is not None:  # wup_similarity can return None, so skip those pairs
                        if j.wup_similarity(k) > maxScore:
                            maxScore = j.wup_similarity(k)
                            firstWord = j
                            secondWord = k
        print("highest-scoring (semantically closest) sense pair of the two words")
        print("score : {}".format(maxScore))
        print("firstWord : {}".format(firstWord))
        print("secondWord : {}".format(secondWord))
        print("\n")
        if isinstance(firstWord, str):
            # no sense pair produced a score; fall back to just the first label
            return get_similar_words(words[:1])
else:
print(firstWord, firstWord.definition())
print(secondWord, secondWord.definition())
print('\n')
return [firstWord, secondWord]
else:
synSetList = []
for i in range(len(words)):
labelMean1 = wn.synsets(words[i])
for j in labelMean1:
synSetList.append(j)
return synSetList
def getWordNetScore(model):
new_dic = {}
scoreFile = ("{}\\{}.json".format( scorePath, model ) )
print(scoreFile)
if not os.path.exists(scoreFile):
with open(scoreFile,"w") as dump_f:
new_dic['50'] = list()
new_dic['100'] = list()
new_dic['150'] = list()
new_dic['200'] = list()
new_dic['250'] = list()
new_dic['300'] = list()
json.dump(new_dic,dump_f)
with open(scoreFile,'r') as load_f:
load_dict = json.load(load_f)
for user in usersData:
print('\n')
print( user )
print('\n')
countPost = 0
countLike = 0
countComment = 0
imageScoreDic = {}
videoScoreDic = {}
        # new account: reset the per-category image scores
countImages = 0
for t in myTypes:
imageScoreDic[t] = 0
        # new account: reset the per-category video scores
countVideos = 0
for t in myTypes:
videoScoreDic[t] = 0
for timestamp in usersData[user]['data']:
countPost += 1
countLike += usersData[user]['data'][timestamp]['likes']
countComment += usersData[user]['data'][timestamp]['comments']
if usersData[user]['data'][timestamp]['is_video']:
countVideos += 1
else:
countImages += 1
if 'labels' not in usersData[user]['data'][timestamp]:
print( user )
print( timestamp )
print( usersData[user]['data'][timestamp] )
if len(usersData[user]['data'][timestamp]['labels']) > 0:
synsetWords = get_similar_words(usersData[user]['data'][timestamp]['labels'])
if len(synsetWords) == 2:
for t in myTypes:
standard = wn.synsets(t)
firstWordMaxWordSimilarity = 0
secondWordMaxWordSimilarity = 0
for k in standard:
if synsetWords[0].wup_similarity(k) is not None:
if synsetWords[0].wup_similarity(k) > firstWordMaxWordSimilarity:
firstWordMaxWordSimilarity = synsetWords[0].wup_similarity(k)
print("{} vs {} = {}".format( synsetWords[0], k, firstWordMaxWordSimilarity ))
if synsetWords[1].wup_similarity(k) is not None:
if synsetWords[1].wup_similarity(k) > secondWordMaxWordSimilarity:
secondWordMaxWordSimilarity = synsetWords[1].wup_similarity(k)
print("{} vs {} = {}".format( synsetWords[1], k, secondWordMaxWordSimilarity ))
maxScore = (firstWordMaxWordSimilarity+secondWordMaxWordSimilarity)/2
if usersData[user]['data'][timestamp]['is_video']:
                            # print('video score for {}: {}'.format(t, maxScore))
videoScoreDic[t] += maxScore - 0.05
else:
                            # print('image score for {}: {}'.format(t, maxScore))
imageScoreDic[t] += maxScore - 0.05
else:
for t in myTypes:
maxScore = 0
standard = wn.synsets(t)
for k in standard:
for s in synsetWords:
if s.wup_similarity(k) is not None:
                                    # print('candidate synset {0} vs target synset {1}: score {2}'.format(s, k, s.wup_similarity(k)))
if s.wup_similarity(k) > maxScore:
maxScore = s.wup_similarity(k)
print("{} vs {} = {}".format( s, k, maxScore ))
if usersData[user]['data'][timestamp]['is_video']:
                            # print('video score for {}: {}'.format(t, maxScore))
videoScoreDic[t] += maxScore - 0.05
else:
                            # print('image score for {}: {}'.format(t, maxScore))
imageScoreDic[t] += maxScore - 0.05
# print('\n')
# print('\n')
# print("{}目前圖片個數 : {}".format(user, countImages))
# print("{}目前在每個分類的總分:".format(user))
# print(imageScoreDic)
# print('\n')
# print("{}目前影片個數 : {}".format(user, countVideos))
# print("{}目前在每個分類的總分:".format(user))
# print("{}目前在每個分類的總分:".format(user))
# print(videoScoreDic)
# print('\n\n')
if countPost != 0 and countPost % 50 == 0 :
print(countPost)
users = { load_dict[str(countPost)][i]['name']:i for i in range( 0, len(load_dict[str(countPost)]) ) }
try:
currentImgScoreDic = { t:round(imageScoreDic[t]/countImages*100, 3) for t in myTypes }
                except ZeroDivisionError:
                    # this account has no images yet
                    currentImgScoreDic = {}
                    print("no images for this account yet")
try:
currentVideoScoreDic = { t:round(videoScoreDic[t]/countVideos*100, 3) for t in myTypes }
                except ZeroDivisionError:
                    # this account has no videos yet
                    currentVideoScoreDic = {}
                    print("no videos for this account yet")
if user in users:
load_dict[str(countPost)][ users[user] ]['follower'] = usersData[user]['followers']
load_dict[str(countPost)][ users[user] ]['like'] = round( countLike/countPost, 3)
load_dict[str(countPost)][ users[user] ]['comment'] = round(countComment/countPost,3)
load_dict[str(countPost)][ users[user] ]['image']['amount'] = countImages
load_dict[str(countPost)][ users[user] ]['image']['score'] = currentImgScoreDic
load_dict[str(countPost)][ users[user] ]['video']['amount'] = countVideos
load_dict[str(countPost)][ users[user] ]['video']['score'] = currentVideoScoreDic
load_dict[str(countPost)][ users[user] ]['ERate'] = round( ((countLike/countPost)+(countComment/countPost))/usersData[user]['followers'], 5 )
else:
new_dic = {}
new_dic['name'] = user
new_dic['follower'] = usersData[user]['followers']
new_dic['like'] = round( countLike/countPost, 3)
new_dic['comment'] = round(countComment/countPost,3)
new_dic['image'] = {}
new_dic['image']['amount'] = countImages
new_dic['image']['score'] = currentImgScoreDic
new_dic['video'] = {}
new_dic['video']['amount'] = countVideos
new_dic['video']['score'] = currentVideoScoreDic
new_dic['ERate'] = round( ((countLike/countPost)+(countComment/countPost))/usersData[user]['followers'], 5 )
load_dict[str(countPost)].append( new_dic )
if( countPost == 300 ):
break
if countPost < 300:
if countPost > 250:
countPost = 300
elif countPost > 200:
countPost = 250
elif countPost > 150:
countPost = 200
elif countPost > 100:
countPost = 150
elif countPost > 50:
countPost = 100
else:
countPost = 50
users = { load_dict[str(countPost-50)][i]['name']:i for i in range( 0, len(load_dict[str(countPost-50)]) ) }
finalDic = load_dict[str(countPost-50)][ users[user] ]
while countPost <= 300:
users = { load_dict[str(countPost)][i]['name']:i for i in range( 0, len(load_dict[str(countPost)]) ) }
if user in users:
load_dict[str(countPost)][ users[user] ] = finalDic
else:
load_dict[str(countPost)].append( finalDic )
countPost += 50
with open(scoreFile, "w") as dump_f:
json.dump(load_dict, dump_f)
if __name__ == '__main__':
getWordNetScore("wordNet")
# print( usersData )
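The whole scoring routine above rests on WordNet's Wu-Palmer similarity; as a quick standalone sanity check (assuming NLTK and its wordnet corpus are installed), the core call looks like this:
from nltk.corpus import wordnet as wn

# Compare the first noun senses of two of the target categories.
dog = wn.synsets('dog')[0]      # Synset('dog.n.01')
cat = wn.synsets('cat')[0]      # Synset('cat.n.01')
print(dog.wup_similarity(cat))  # roughly 0.86: the two senses share a close hypernym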
|
[
"import os\nimport json\nfrom nltk.corpus import wordnet as wn\nfrom itertools import combinations #計算排列組合 \n\n\n# 需要被計算的分類\nmyTypes = ['animal', 'vehicle', 'food', 'fashion', 'dog', 'cat', 'car', 'motorcycle']\n\n# 計算完網紅權重存放的位置\nscorePath = \"..\\\\data\\\\score\"\n\n# getUsersData.py儲存網紅貼文資料的json檔案,拿來計算分數\nusersDataFile = \"..\\\\data\\\\usersData.json\"\nwith open(usersDataFile, 'r') as load_f:\n usersData = json.load(load_f)\n\n\ndef get_similar_words(words):\n\n words = [w.lower() for w in words]\n\n if len(words) > 1:\n maxScore = 0\n firstWord = ''\n secondWord = ''\n\n labelCom = list(combinations(words, 2)) #計算所有label內的排列組合\n for i in labelCom: #labelCom 為排列組合的結果\n labelMean1 = wn.synsets(i[0])#取出每個計算詞的詞性\n labelMean2 = wn.synsets(i[1])\n\n for j in labelMean1:\n for k in labelMean2:\n if j.wup_similarity(k) is not None:#因有可能出現計算結果為None的狀況 所以需要排除\n if j.wup_similarity(k) > maxScore:\n maxScore = j.wup_similarity(k)\n firstWord = j\n secondWord = k\n\n print(\"兩個詞的語意獲得最高分(語意相近)\")\n print(\"score : {}\".format(maxScore))\n print(\"firstWord : {}\".format(firstWord))\n print(\"secondWord : {}\".format(secondWord))\n print(\"\\n\")\n\n if type(firstWord) == type('') :\n return get_similar_words( list(words[0]) )\n else:\n print(firstWord, firstWord.definition())\n print(secondWord, secondWord.definition())\n print('\\n')\n return [firstWord, secondWord]\n\n else:\n synSetList = []\n for i in range(len(words)):\n labelMean1 = wn.synsets(words[i])\n for j in labelMean1:\n synSetList.append(j)\n\n return synSetList\n\n\ndef getWordNetScore(model):\n \n new_dic = {}\n scoreFile = (\"{}\\\\{}.json\".format( scorePath, model ) )\n print(scoreFile)\n if not os.path.exists(scoreFile):\n with open(scoreFile,\"w\") as dump_f:\n new_dic['50'] = list()\n new_dic['100'] = list()\n new_dic['150'] = list()\n new_dic['200'] = list()\n new_dic['250'] = list()\n new_dic['300'] = list()\n json.dump(new_dic,dump_f)\n \n with open(scoreFile,'r') as load_f:\n load_dict = json.load(load_f)\n\n for user in usersData:\n print('\\n')\n print( user )\n print('\\n')\n countPost = 0\n countLike = 0\n countComment = 0\n imageScoreDic = {}\n videoScoreDic = {}\n \n # 換帳號,圖片分類分數初始化\n countImages = 0\n for t in myTypes:\n imageScoreDic[t] = 0\n\n # 換帳號,影片分類分數初始化\n countVideos = 0\n for t in myTypes:\n videoScoreDic[t] = 0\n\n\n for timestamp in usersData[user]['data']:\n \n countPost += 1\n countLike += usersData[user]['data'][timestamp]['likes']\n countComment += usersData[user]['data'][timestamp]['comments']\n \n if usersData[user]['data'][timestamp]['is_video']:\n countVideos += 1\n else:\n countImages += 1\n\n if 'labels' not in usersData[user]['data'][timestamp]:\n print( user )\n print( timestamp )\n print( usersData[user]['data'][timestamp] )\n\n if len(usersData[user]['data'][timestamp]['labels']) > 0:\n\n synsetWords = get_similar_words(usersData[user]['data'][timestamp]['labels'])\n\n if len(synsetWords) == 2:\n for t in myTypes:\n standard = wn.synsets(t)\n firstWordMaxWordSimilarity = 0\n secondWordMaxWordSimilarity = 0\n \n for k in standard:\n if synsetWords[0].wup_similarity(k) is not None:\n if synsetWords[0].wup_similarity(k) > firstWordMaxWordSimilarity:\n firstWordMaxWordSimilarity = synsetWords[0].wup_similarity(k)\n print(\"{} vs {} = {}\".format( synsetWords[0], k, firstWordMaxWordSimilarity ))\n \n if synsetWords[1].wup_similarity(k) is not None:\n if synsetWords[1].wup_similarity(k) > secondWordMaxWordSimilarity:\n secondWordMaxWordSimilarity = synsetWords[1].wup_similarity(k)\n print(\"{} vs 
{} = {}\".format( synsetWords[1], k, secondWordMaxWordSimilarity ))\n \n maxScore = (firstWordMaxWordSimilarity+secondWordMaxWordSimilarity)/2\n if usersData[user]['data'][timestamp]['is_video']:\n # print( '這部影片在{}獲得{}分'.format(t, maxScore) )\n videoScoreDic[t] += maxScore - 0.05 \n else:\n # print( '這張圖片在{}獲得{}分'.format(t, maxScore) )\n imageScoreDic[t] += maxScore - 0.05\n else:\n\n for t in myTypes:\n maxScore = 0\n standard = wn.synsets(t)\n\n for k in standard:\n for s in synsetWords:\n if s.wup_similarity(k) is not None:\n #print('{0}為計算詞性,{1}為目標詞性,分數為:{2}'.format(j,k,j.wup_similarity(k)))\n if s.wup_similarity(k) > maxScore:\n maxScore = s.wup_similarity(k)\n print(\"{} vs {} = {}\".format( s, k, maxScore ))\n \n if usersData[user]['data'][timestamp]['is_video']:\n # print( '這部影片在{}獲得{}分'.format(t, maxScore) )\n videoScoreDic[t] += maxScore - 0.05 \n else:\n # print( '這張圖片在{}獲得{}分'.format(t, maxScore) )\n imageScoreDic[t] += maxScore - 0.05\n \n # print('\\n') \n \n \n # print('\\n')\n # print(\"{}目前圖片個數 : {}\".format(user, countImages))\n # print(\"{}目前在每個分類的總分:\".format(user))\n # print(imageScoreDic)\n # print('\\n')\n # print(\"{}目前影片個數 : {}\".format(user, countVideos))\n # print(\"{}目前在每個分類的總分:\".format(user))\n # print(\"{}目前在每個分類的總分:\".format(user))\n # print(videoScoreDic)\n # print('\\n\\n')\n\n if countPost != 0 and countPost % 50 == 0 :\n print(countPost)\n users = { load_dict[str(countPost)][i]['name']:i for i in range( 0, len(load_dict[str(countPost)]) ) }\n try:\n currentImgScoreDic = { t:round(imageScoreDic[t]/countImages*100, 3) for t in myTypes }\n except :\n currentImgScoreDic = {}\n print(\"目前沒有圖片\")\n try:\n currentVideoScoreDic = { t:round(videoScoreDic[t]/countVideos*100, 3) for t in myTypes }\n except :\n currentVideoScoreDic = {}\n print(\"目前沒有影片\")\n \n if user in users:\n load_dict[str(countPost)][ users[user] ]['follower'] = usersData[user]['followers']\n load_dict[str(countPost)][ users[user] ]['like'] = round( countLike/countPost, 3)\n load_dict[str(countPost)][ users[user] ]['comment'] = round(countComment/countPost,3)\n load_dict[str(countPost)][ users[user] ]['image']['amount'] = countImages\n load_dict[str(countPost)][ users[user] ]['image']['score'] = currentImgScoreDic\n load_dict[str(countPost)][ users[user] ]['video']['amount'] = countVideos\n load_dict[str(countPost)][ users[user] ]['video']['score'] = currentVideoScoreDic\n load_dict[str(countPost)][ users[user] ]['ERate'] = round( ((countLike/countPost)+(countComment/countPost))/usersData[user]['followers'], 5 )\n else:\n new_dic = {}\n new_dic['name'] = user\n new_dic['follower'] = usersData[user]['followers']\n new_dic['like'] = round( countLike/countPost, 3)\n new_dic['comment'] = round(countComment/countPost,3)\n new_dic['image'] = {}\n new_dic['image']['amount'] = countImages\n new_dic['image']['score'] = currentImgScoreDic\n new_dic['video'] = {}\n new_dic['video']['amount'] = countVideos\n new_dic['video']['score'] = currentVideoScoreDic\n new_dic['ERate'] = round( ((countLike/countPost)+(countComment/countPost))/usersData[user]['followers'], 5 )\n\n load_dict[str(countPost)].append( new_dic )\n \n if( countPost == 300 ):\n break\n\n if countPost < 300:\n \n if countPost > 250:\n countPost = 300\n elif countPost > 200:\n countPost = 250\n elif countPost > 150:\n countPost = 200\n elif countPost > 100:\n countPost = 150\n elif countPost > 50:\n countPost = 100\n else:\n countPost = 50\n \n users = { load_dict[str(countPost-50)][i]['name']:i for i in range( 0, 
len(load_dict[str(countPost-50)]) ) }\n finalDic = load_dict[str(countPost-50)][ users[user] ]\n while countPost <= 300:\n users = { load_dict[str(countPost)][i]['name']:i for i in range( 0, len(load_dict[str(countPost)]) ) }\n if user in users:\n load_dict[str(countPost)][ users[user] ] = finalDic\n else:\n load_dict[str(countPost)].append( finalDic )\n \n countPost += 50\n \n with open(scoreFile, \"w\") as dump_f:\n json.dump(load_dict, dump_f)\n\n\nif __name__ == '__main__':\n\n getWordNetScore(\"wordNet\")\n\n # print( usersData )\n",
"import os\nimport json\nfrom nltk.corpus import wordnet as wn\nfrom itertools import combinations\nmyTypes = ['animal', 'vehicle', 'food', 'fashion', 'dog', 'cat', 'car',\n 'motorcycle']\nscorePath = '..\\\\data\\\\score'\nusersDataFile = '..\\\\data\\\\usersData.json'\nwith open(usersDataFile, 'r') as load_f:\n usersData = json.load(load_f)\n\n\ndef get_similar_words(words):\n words = [w.lower() for w in words]\n if len(words) > 1:\n maxScore = 0\n firstWord = ''\n secondWord = ''\n labelCom = list(combinations(words, 2))\n for i in labelCom:\n labelMean1 = wn.synsets(i[0])\n labelMean2 = wn.synsets(i[1])\n for j in labelMean1:\n for k in labelMean2:\n if j.wup_similarity(k) is not None:\n if j.wup_similarity(k) > maxScore:\n maxScore = j.wup_similarity(k)\n firstWord = j\n secondWord = k\n print('兩個詞的語意獲得最高分(語意相近)')\n print('score : {}'.format(maxScore))\n print('firstWord : {}'.format(firstWord))\n print('secondWord : {}'.format(secondWord))\n print('\\n')\n if type(firstWord) == type(''):\n return get_similar_words(list(words[0]))\n else:\n print(firstWord, firstWord.definition())\n print(secondWord, secondWord.definition())\n print('\\n')\n return [firstWord, secondWord]\n else:\n synSetList = []\n for i in range(len(words)):\n labelMean1 = wn.synsets(words[i])\n for j in labelMean1:\n synSetList.append(j)\n return synSetList\n\n\ndef getWordNetScore(model):\n new_dic = {}\n scoreFile = '{}\\\\{}.json'.format(scorePath, model)\n print(scoreFile)\n if not os.path.exists(scoreFile):\n with open(scoreFile, 'w') as dump_f:\n new_dic['50'] = list()\n new_dic['100'] = list()\n new_dic['150'] = list()\n new_dic['200'] = list()\n new_dic['250'] = list()\n new_dic['300'] = list()\n json.dump(new_dic, dump_f)\n with open(scoreFile, 'r') as load_f:\n load_dict = json.load(load_f)\n for user in usersData:\n print('\\n')\n print(user)\n print('\\n')\n countPost = 0\n countLike = 0\n countComment = 0\n imageScoreDic = {}\n videoScoreDic = {}\n countImages = 0\n for t in myTypes:\n imageScoreDic[t] = 0\n countVideos = 0\n for t in myTypes:\n videoScoreDic[t] = 0\n for timestamp in usersData[user]['data']:\n countPost += 1\n countLike += usersData[user]['data'][timestamp]['likes']\n countComment += usersData[user]['data'][timestamp]['comments']\n if usersData[user]['data'][timestamp]['is_video']:\n countVideos += 1\n else:\n countImages += 1\n if 'labels' not in usersData[user]['data'][timestamp]:\n print(user)\n print(timestamp)\n print(usersData[user]['data'][timestamp])\n if len(usersData[user]['data'][timestamp]['labels']) > 0:\n synsetWords = get_similar_words(usersData[user]['data'][\n timestamp]['labels'])\n if len(synsetWords) == 2:\n for t in myTypes:\n standard = wn.synsets(t)\n firstWordMaxWordSimilarity = 0\n secondWordMaxWordSimilarity = 0\n for k in standard:\n if synsetWords[0].wup_similarity(k) is not None:\n if synsetWords[0].wup_similarity(k\n ) > firstWordMaxWordSimilarity:\n firstWordMaxWordSimilarity = synsetWords[0\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[0], k,\n firstWordMaxWordSimilarity))\n if synsetWords[1].wup_similarity(k) is not None:\n if synsetWords[1].wup_similarity(k\n ) > secondWordMaxWordSimilarity:\n secondWordMaxWordSimilarity = synsetWords[1\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[1], k,\n secondWordMaxWordSimilarity))\n maxScore = (firstWordMaxWordSimilarity +\n secondWordMaxWordSimilarity) / 2\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n 
imageScoreDic[t] += maxScore - 0.05\n else:\n for t in myTypes:\n maxScore = 0\n standard = wn.synsets(t)\n for k in standard:\n for s in synsetWords:\n if s.wup_similarity(k) is not None:\n if s.wup_similarity(k) > maxScore:\n maxScore = s.wup_similarity(k)\n print('{} vs {} = {}'.format(s, k,\n maxScore))\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n imageScoreDic[t] += maxScore - 0.05\n if countPost != 0 and countPost % 50 == 0:\n print(countPost)\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n try:\n currentImgScoreDic = {t: round(imageScoreDic[t] /\n countImages * 100, 3) for t in myTypes}\n except:\n currentImgScoreDic = {}\n print('目前沒有圖片')\n try:\n currentVideoScoreDic = {t: round(videoScoreDic[t] /\n countVideos * 100, 3) for t in myTypes}\n except:\n currentVideoScoreDic = {}\n print('目前沒有影片')\n if user in users:\n load_dict[str(countPost)][users[user]]['follower'\n ] = usersData[user]['followers']\n load_dict[str(countPost)][users[user]]['like'] = round(\n countLike / countPost, 3)\n load_dict[str(countPost)][users[user]]['comment'] = round(\n countComment / countPost, 3)\n load_dict[str(countPost)][users[user]]['image']['amount'\n ] = countImages\n load_dict[str(countPost)][users[user]]['image']['score'\n ] = currentImgScoreDic\n load_dict[str(countPost)][users[user]]['video']['amount'\n ] = countVideos\n load_dict[str(countPost)][users[user]]['video']['score'\n ] = currentVideoScoreDic\n load_dict[str(countPost)][users[user]]['ERate'] = round(\n (countLike / countPost + countComment / countPost) /\n usersData[user]['followers'], 5)\n else:\n new_dic = {}\n new_dic['name'] = user\n new_dic['follower'] = usersData[user]['followers']\n new_dic['like'] = round(countLike / countPost, 3)\n new_dic['comment'] = round(countComment / countPost, 3)\n new_dic['image'] = {}\n new_dic['image']['amount'] = countImages\n new_dic['image']['score'] = currentImgScoreDic\n new_dic['video'] = {}\n new_dic['video']['amount'] = countVideos\n new_dic['video']['score'] = currentVideoScoreDic\n new_dic['ERate'] = round((countLike / countPost + \n countComment / countPost) / usersData[user][\n 'followers'], 5)\n load_dict[str(countPost)].append(new_dic)\n if countPost == 300:\n break\n if countPost < 300:\n if countPost > 250:\n countPost = 300\n elif countPost > 200:\n countPost = 250\n elif countPost > 150:\n countPost = 200\n elif countPost > 100:\n countPost = 150\n elif countPost > 50:\n countPost = 100\n else:\n countPost = 50\n users = {load_dict[str(countPost - 50)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost - 50)]))}\n finalDic = load_dict[str(countPost - 50)][users[user]]\n while countPost <= 300:\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n if user in users:\n load_dict[str(countPost)][users[user]] = finalDic\n else:\n load_dict[str(countPost)].append(finalDic)\n countPost += 50\n with open(scoreFile, 'w') as dump_f:\n json.dump(load_dict, dump_f)\n\n\nif __name__ == '__main__':\n getWordNetScore('wordNet')\n",
"<import token>\nmyTypes = ['animal', 'vehicle', 'food', 'fashion', 'dog', 'cat', 'car',\n 'motorcycle']\nscorePath = '..\\\\data\\\\score'\nusersDataFile = '..\\\\data\\\\usersData.json'\nwith open(usersDataFile, 'r') as load_f:\n usersData = json.load(load_f)\n\n\ndef get_similar_words(words):\n words = [w.lower() for w in words]\n if len(words) > 1:\n maxScore = 0\n firstWord = ''\n secondWord = ''\n labelCom = list(combinations(words, 2))\n for i in labelCom:\n labelMean1 = wn.synsets(i[0])\n labelMean2 = wn.synsets(i[1])\n for j in labelMean1:\n for k in labelMean2:\n if j.wup_similarity(k) is not None:\n if j.wup_similarity(k) > maxScore:\n maxScore = j.wup_similarity(k)\n firstWord = j\n secondWord = k\n print('兩個詞的語意獲得最高分(語意相近)')\n print('score : {}'.format(maxScore))\n print('firstWord : {}'.format(firstWord))\n print('secondWord : {}'.format(secondWord))\n print('\\n')\n if type(firstWord) == type(''):\n return get_similar_words(list(words[0]))\n else:\n print(firstWord, firstWord.definition())\n print(secondWord, secondWord.definition())\n print('\\n')\n return [firstWord, secondWord]\n else:\n synSetList = []\n for i in range(len(words)):\n labelMean1 = wn.synsets(words[i])\n for j in labelMean1:\n synSetList.append(j)\n return synSetList\n\n\ndef getWordNetScore(model):\n new_dic = {}\n scoreFile = '{}\\\\{}.json'.format(scorePath, model)\n print(scoreFile)\n if not os.path.exists(scoreFile):\n with open(scoreFile, 'w') as dump_f:\n new_dic['50'] = list()\n new_dic['100'] = list()\n new_dic['150'] = list()\n new_dic['200'] = list()\n new_dic['250'] = list()\n new_dic['300'] = list()\n json.dump(new_dic, dump_f)\n with open(scoreFile, 'r') as load_f:\n load_dict = json.load(load_f)\n for user in usersData:\n print('\\n')\n print(user)\n print('\\n')\n countPost = 0\n countLike = 0\n countComment = 0\n imageScoreDic = {}\n videoScoreDic = {}\n countImages = 0\n for t in myTypes:\n imageScoreDic[t] = 0\n countVideos = 0\n for t in myTypes:\n videoScoreDic[t] = 0\n for timestamp in usersData[user]['data']:\n countPost += 1\n countLike += usersData[user]['data'][timestamp]['likes']\n countComment += usersData[user]['data'][timestamp]['comments']\n if usersData[user]['data'][timestamp]['is_video']:\n countVideos += 1\n else:\n countImages += 1\n if 'labels' not in usersData[user]['data'][timestamp]:\n print(user)\n print(timestamp)\n print(usersData[user]['data'][timestamp])\n if len(usersData[user]['data'][timestamp]['labels']) > 0:\n synsetWords = get_similar_words(usersData[user]['data'][\n timestamp]['labels'])\n if len(synsetWords) == 2:\n for t in myTypes:\n standard = wn.synsets(t)\n firstWordMaxWordSimilarity = 0\n secondWordMaxWordSimilarity = 0\n for k in standard:\n if synsetWords[0].wup_similarity(k) is not None:\n if synsetWords[0].wup_similarity(k\n ) > firstWordMaxWordSimilarity:\n firstWordMaxWordSimilarity = synsetWords[0\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[0], k,\n firstWordMaxWordSimilarity))\n if synsetWords[1].wup_similarity(k) is not None:\n if synsetWords[1].wup_similarity(k\n ) > secondWordMaxWordSimilarity:\n secondWordMaxWordSimilarity = synsetWords[1\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[1], k,\n secondWordMaxWordSimilarity))\n maxScore = (firstWordMaxWordSimilarity +\n secondWordMaxWordSimilarity) / 2\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n imageScoreDic[t] += maxScore - 0.05\n else:\n for t in myTypes:\n maxScore = 0\n standard = 
wn.synsets(t)\n for k in standard:\n for s in synsetWords:\n if s.wup_similarity(k) is not None:\n if s.wup_similarity(k) > maxScore:\n maxScore = s.wup_similarity(k)\n print('{} vs {} = {}'.format(s, k,\n maxScore))\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n imageScoreDic[t] += maxScore - 0.05\n if countPost != 0 and countPost % 50 == 0:\n print(countPost)\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n try:\n currentImgScoreDic = {t: round(imageScoreDic[t] /\n countImages * 100, 3) for t in myTypes}\n except:\n currentImgScoreDic = {}\n print('目前沒有圖片')\n try:\n currentVideoScoreDic = {t: round(videoScoreDic[t] /\n countVideos * 100, 3) for t in myTypes}\n except:\n currentVideoScoreDic = {}\n print('目前沒有影片')\n if user in users:\n load_dict[str(countPost)][users[user]]['follower'\n ] = usersData[user]['followers']\n load_dict[str(countPost)][users[user]]['like'] = round(\n countLike / countPost, 3)\n load_dict[str(countPost)][users[user]]['comment'] = round(\n countComment / countPost, 3)\n load_dict[str(countPost)][users[user]]['image']['amount'\n ] = countImages\n load_dict[str(countPost)][users[user]]['image']['score'\n ] = currentImgScoreDic\n load_dict[str(countPost)][users[user]]['video']['amount'\n ] = countVideos\n load_dict[str(countPost)][users[user]]['video']['score'\n ] = currentVideoScoreDic\n load_dict[str(countPost)][users[user]]['ERate'] = round(\n (countLike / countPost + countComment / countPost) /\n usersData[user]['followers'], 5)\n else:\n new_dic = {}\n new_dic['name'] = user\n new_dic['follower'] = usersData[user]['followers']\n new_dic['like'] = round(countLike / countPost, 3)\n new_dic['comment'] = round(countComment / countPost, 3)\n new_dic['image'] = {}\n new_dic['image']['amount'] = countImages\n new_dic['image']['score'] = currentImgScoreDic\n new_dic['video'] = {}\n new_dic['video']['amount'] = countVideos\n new_dic['video']['score'] = currentVideoScoreDic\n new_dic['ERate'] = round((countLike / countPost + \n countComment / countPost) / usersData[user][\n 'followers'], 5)\n load_dict[str(countPost)].append(new_dic)\n if countPost == 300:\n break\n if countPost < 300:\n if countPost > 250:\n countPost = 300\n elif countPost > 200:\n countPost = 250\n elif countPost > 150:\n countPost = 200\n elif countPost > 100:\n countPost = 150\n elif countPost > 50:\n countPost = 100\n else:\n countPost = 50\n users = {load_dict[str(countPost - 50)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost - 50)]))}\n finalDic = load_dict[str(countPost - 50)][users[user]]\n while countPost <= 300:\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n if user in users:\n load_dict[str(countPost)][users[user]] = finalDic\n else:\n load_dict[str(countPost)].append(finalDic)\n countPost += 50\n with open(scoreFile, 'w') as dump_f:\n json.dump(load_dict, dump_f)\n\n\nif __name__ == '__main__':\n getWordNetScore('wordNet')\n",
"<import token>\n<assignment token>\nwith open(usersDataFile, 'r') as load_f:\n usersData = json.load(load_f)\n\n\ndef get_similar_words(words):\n words = [w.lower() for w in words]\n if len(words) > 1:\n maxScore = 0\n firstWord = ''\n secondWord = ''\n labelCom = list(combinations(words, 2))\n for i in labelCom:\n labelMean1 = wn.synsets(i[0])\n labelMean2 = wn.synsets(i[1])\n for j in labelMean1:\n for k in labelMean2:\n if j.wup_similarity(k) is not None:\n if j.wup_similarity(k) > maxScore:\n maxScore = j.wup_similarity(k)\n firstWord = j\n secondWord = k\n print('兩個詞的語意獲得最高分(語意相近)')\n print('score : {}'.format(maxScore))\n print('firstWord : {}'.format(firstWord))\n print('secondWord : {}'.format(secondWord))\n print('\\n')\n if type(firstWord) == type(''):\n return get_similar_words(list(words[0]))\n else:\n print(firstWord, firstWord.definition())\n print(secondWord, secondWord.definition())\n print('\\n')\n return [firstWord, secondWord]\n else:\n synSetList = []\n for i in range(len(words)):\n labelMean1 = wn.synsets(words[i])\n for j in labelMean1:\n synSetList.append(j)\n return synSetList\n\n\ndef getWordNetScore(model):\n new_dic = {}\n scoreFile = '{}\\\\{}.json'.format(scorePath, model)\n print(scoreFile)\n if not os.path.exists(scoreFile):\n with open(scoreFile, 'w') as dump_f:\n new_dic['50'] = list()\n new_dic['100'] = list()\n new_dic['150'] = list()\n new_dic['200'] = list()\n new_dic['250'] = list()\n new_dic['300'] = list()\n json.dump(new_dic, dump_f)\n with open(scoreFile, 'r') as load_f:\n load_dict = json.load(load_f)\n for user in usersData:\n print('\\n')\n print(user)\n print('\\n')\n countPost = 0\n countLike = 0\n countComment = 0\n imageScoreDic = {}\n videoScoreDic = {}\n countImages = 0\n for t in myTypes:\n imageScoreDic[t] = 0\n countVideos = 0\n for t in myTypes:\n videoScoreDic[t] = 0\n for timestamp in usersData[user]['data']:\n countPost += 1\n countLike += usersData[user]['data'][timestamp]['likes']\n countComment += usersData[user]['data'][timestamp]['comments']\n if usersData[user]['data'][timestamp]['is_video']:\n countVideos += 1\n else:\n countImages += 1\n if 'labels' not in usersData[user]['data'][timestamp]:\n print(user)\n print(timestamp)\n print(usersData[user]['data'][timestamp])\n if len(usersData[user]['data'][timestamp]['labels']) > 0:\n synsetWords = get_similar_words(usersData[user]['data'][\n timestamp]['labels'])\n if len(synsetWords) == 2:\n for t in myTypes:\n standard = wn.synsets(t)\n firstWordMaxWordSimilarity = 0\n secondWordMaxWordSimilarity = 0\n for k in standard:\n if synsetWords[0].wup_similarity(k) is not None:\n if synsetWords[0].wup_similarity(k\n ) > firstWordMaxWordSimilarity:\n firstWordMaxWordSimilarity = synsetWords[0\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[0], k,\n firstWordMaxWordSimilarity))\n if synsetWords[1].wup_similarity(k) is not None:\n if synsetWords[1].wup_similarity(k\n ) > secondWordMaxWordSimilarity:\n secondWordMaxWordSimilarity = synsetWords[1\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[1], k,\n secondWordMaxWordSimilarity))\n maxScore = (firstWordMaxWordSimilarity +\n secondWordMaxWordSimilarity) / 2\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n imageScoreDic[t] += maxScore - 0.05\n else:\n for t in myTypes:\n maxScore = 0\n standard = wn.synsets(t)\n for k in standard:\n for s in synsetWords:\n if s.wup_similarity(k) is not None:\n if s.wup_similarity(k) > maxScore:\n maxScore = 
s.wup_similarity(k)\n print('{} vs {} = {}'.format(s, k,\n maxScore))\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n imageScoreDic[t] += maxScore - 0.05\n if countPost != 0 and countPost % 50 == 0:\n print(countPost)\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n try:\n currentImgScoreDic = {t: round(imageScoreDic[t] /\n countImages * 100, 3) for t in myTypes}\n except:\n currentImgScoreDic = {}\n print('目前沒有圖片')\n try:\n currentVideoScoreDic = {t: round(videoScoreDic[t] /\n countVideos * 100, 3) for t in myTypes}\n except:\n currentVideoScoreDic = {}\n print('目前沒有影片')\n if user in users:\n load_dict[str(countPost)][users[user]]['follower'\n ] = usersData[user]['followers']\n load_dict[str(countPost)][users[user]]['like'] = round(\n countLike / countPost, 3)\n load_dict[str(countPost)][users[user]]['comment'] = round(\n countComment / countPost, 3)\n load_dict[str(countPost)][users[user]]['image']['amount'\n ] = countImages\n load_dict[str(countPost)][users[user]]['image']['score'\n ] = currentImgScoreDic\n load_dict[str(countPost)][users[user]]['video']['amount'\n ] = countVideos\n load_dict[str(countPost)][users[user]]['video']['score'\n ] = currentVideoScoreDic\n load_dict[str(countPost)][users[user]]['ERate'] = round(\n (countLike / countPost + countComment / countPost) /\n usersData[user]['followers'], 5)\n else:\n new_dic = {}\n new_dic['name'] = user\n new_dic['follower'] = usersData[user]['followers']\n new_dic['like'] = round(countLike / countPost, 3)\n new_dic['comment'] = round(countComment / countPost, 3)\n new_dic['image'] = {}\n new_dic['image']['amount'] = countImages\n new_dic['image']['score'] = currentImgScoreDic\n new_dic['video'] = {}\n new_dic['video']['amount'] = countVideos\n new_dic['video']['score'] = currentVideoScoreDic\n new_dic['ERate'] = round((countLike / countPost + \n countComment / countPost) / usersData[user][\n 'followers'], 5)\n load_dict[str(countPost)].append(new_dic)\n if countPost == 300:\n break\n if countPost < 300:\n if countPost > 250:\n countPost = 300\n elif countPost > 200:\n countPost = 250\n elif countPost > 150:\n countPost = 200\n elif countPost > 100:\n countPost = 150\n elif countPost > 50:\n countPost = 100\n else:\n countPost = 50\n users = {load_dict[str(countPost - 50)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost - 50)]))}\n finalDic = load_dict[str(countPost - 50)][users[user]]\n while countPost <= 300:\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n if user in users:\n load_dict[str(countPost)][users[user]] = finalDic\n else:\n load_dict[str(countPost)].append(finalDic)\n countPost += 50\n with open(scoreFile, 'w') as dump_f:\n json.dump(load_dict, dump_f)\n\n\nif __name__ == '__main__':\n getWordNetScore('wordNet')\n",
"<import token>\n<assignment token>\n<code token>\n\n\ndef get_similar_words(words):\n words = [w.lower() for w in words]\n if len(words) > 1:\n maxScore = 0\n firstWord = ''\n secondWord = ''\n labelCom = list(combinations(words, 2))\n for i in labelCom:\n labelMean1 = wn.synsets(i[0])\n labelMean2 = wn.synsets(i[1])\n for j in labelMean1:\n for k in labelMean2:\n if j.wup_similarity(k) is not None:\n if j.wup_similarity(k) > maxScore:\n maxScore = j.wup_similarity(k)\n firstWord = j\n secondWord = k\n print('兩個詞的語意獲得最高分(語意相近)')\n print('score : {}'.format(maxScore))\n print('firstWord : {}'.format(firstWord))\n print('secondWord : {}'.format(secondWord))\n print('\\n')\n if type(firstWord) == type(''):\n return get_similar_words(list(words[0]))\n else:\n print(firstWord, firstWord.definition())\n print(secondWord, secondWord.definition())\n print('\\n')\n return [firstWord, secondWord]\n else:\n synSetList = []\n for i in range(len(words)):\n labelMean1 = wn.synsets(words[i])\n for j in labelMean1:\n synSetList.append(j)\n return synSetList\n\n\ndef getWordNetScore(model):\n new_dic = {}\n scoreFile = '{}\\\\{}.json'.format(scorePath, model)\n print(scoreFile)\n if not os.path.exists(scoreFile):\n with open(scoreFile, 'w') as dump_f:\n new_dic['50'] = list()\n new_dic['100'] = list()\n new_dic['150'] = list()\n new_dic['200'] = list()\n new_dic['250'] = list()\n new_dic['300'] = list()\n json.dump(new_dic, dump_f)\n with open(scoreFile, 'r') as load_f:\n load_dict = json.load(load_f)\n for user in usersData:\n print('\\n')\n print(user)\n print('\\n')\n countPost = 0\n countLike = 0\n countComment = 0\n imageScoreDic = {}\n videoScoreDic = {}\n countImages = 0\n for t in myTypes:\n imageScoreDic[t] = 0\n countVideos = 0\n for t in myTypes:\n videoScoreDic[t] = 0\n for timestamp in usersData[user]['data']:\n countPost += 1\n countLike += usersData[user]['data'][timestamp]['likes']\n countComment += usersData[user]['data'][timestamp]['comments']\n if usersData[user]['data'][timestamp]['is_video']:\n countVideos += 1\n else:\n countImages += 1\n if 'labels' not in usersData[user]['data'][timestamp]:\n print(user)\n print(timestamp)\n print(usersData[user]['data'][timestamp])\n if len(usersData[user]['data'][timestamp]['labels']) > 0:\n synsetWords = get_similar_words(usersData[user]['data'][\n timestamp]['labels'])\n if len(synsetWords) == 2:\n for t in myTypes:\n standard = wn.synsets(t)\n firstWordMaxWordSimilarity = 0\n secondWordMaxWordSimilarity = 0\n for k in standard:\n if synsetWords[0].wup_similarity(k) is not None:\n if synsetWords[0].wup_similarity(k\n ) > firstWordMaxWordSimilarity:\n firstWordMaxWordSimilarity = synsetWords[0\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[0], k,\n firstWordMaxWordSimilarity))\n if synsetWords[1].wup_similarity(k) is not None:\n if synsetWords[1].wup_similarity(k\n ) > secondWordMaxWordSimilarity:\n secondWordMaxWordSimilarity = synsetWords[1\n ].wup_similarity(k)\n print('{} vs {} = {}'.format(\n synsetWords[1], k,\n secondWordMaxWordSimilarity))\n maxScore = (firstWordMaxWordSimilarity +\n secondWordMaxWordSimilarity) / 2\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n imageScoreDic[t] += maxScore - 0.05\n else:\n for t in myTypes:\n maxScore = 0\n standard = wn.synsets(t)\n for k in standard:\n for s in synsetWords:\n if s.wup_similarity(k) is not None:\n if s.wup_similarity(k) > maxScore:\n maxScore = s.wup_similarity(k)\n print('{} vs {} = {}'.format(s, k,\n 
maxScore))\n if usersData[user]['data'][timestamp]['is_video']:\n videoScoreDic[t] += maxScore - 0.05\n else:\n imageScoreDic[t] += maxScore - 0.05\n if countPost != 0 and countPost % 50 == 0:\n print(countPost)\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n try:\n currentImgScoreDic = {t: round(imageScoreDic[t] /\n countImages * 100, 3) for t in myTypes}\n except:\n currentImgScoreDic = {}\n print('目前沒有圖片')\n try:\n currentVideoScoreDic = {t: round(videoScoreDic[t] /\n countVideos * 100, 3) for t in myTypes}\n except:\n currentVideoScoreDic = {}\n print('目前沒有影片')\n if user in users:\n load_dict[str(countPost)][users[user]]['follower'\n ] = usersData[user]['followers']\n load_dict[str(countPost)][users[user]]['like'] = round(\n countLike / countPost, 3)\n load_dict[str(countPost)][users[user]]['comment'] = round(\n countComment / countPost, 3)\n load_dict[str(countPost)][users[user]]['image']['amount'\n ] = countImages\n load_dict[str(countPost)][users[user]]['image']['score'\n ] = currentImgScoreDic\n load_dict[str(countPost)][users[user]]['video']['amount'\n ] = countVideos\n load_dict[str(countPost)][users[user]]['video']['score'\n ] = currentVideoScoreDic\n load_dict[str(countPost)][users[user]]['ERate'] = round(\n (countLike / countPost + countComment / countPost) /\n usersData[user]['followers'], 5)\n else:\n new_dic = {}\n new_dic['name'] = user\n new_dic['follower'] = usersData[user]['followers']\n new_dic['like'] = round(countLike / countPost, 3)\n new_dic['comment'] = round(countComment / countPost, 3)\n new_dic['image'] = {}\n new_dic['image']['amount'] = countImages\n new_dic['image']['score'] = currentImgScoreDic\n new_dic['video'] = {}\n new_dic['video']['amount'] = countVideos\n new_dic['video']['score'] = currentVideoScoreDic\n new_dic['ERate'] = round((countLike / countPost + \n countComment / countPost) / usersData[user][\n 'followers'], 5)\n load_dict[str(countPost)].append(new_dic)\n if countPost == 300:\n break\n if countPost < 300:\n if countPost > 250:\n countPost = 300\n elif countPost > 200:\n countPost = 250\n elif countPost > 150:\n countPost = 200\n elif countPost > 100:\n countPost = 150\n elif countPost > 50:\n countPost = 100\n else:\n countPost = 50\n users = {load_dict[str(countPost - 50)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost - 50)]))}\n finalDic = load_dict[str(countPost - 50)][users[user]]\n while countPost <= 300:\n users = {load_dict[str(countPost)][i]['name']: i for i in\n range(0, len(load_dict[str(countPost)]))}\n if user in users:\n load_dict[str(countPost)][users[user]] = finalDic\n else:\n load_dict[str(countPost)].append(finalDic)\n countPost += 50\n with open(scoreFile, 'w') as dump_f:\n json.dump(load_dict, dump_f)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n\n\ndef get_similar_words(words):\n words = [w.lower() for w in words]\n if len(words) > 1:\n maxScore = 0\n firstWord = ''\n secondWord = ''\n labelCom = list(combinations(words, 2))\n for i in labelCom:\n labelMean1 = wn.synsets(i[0])\n labelMean2 = wn.synsets(i[1])\n for j in labelMean1:\n for k in labelMean2:\n if j.wup_similarity(k) is not None:\n if j.wup_similarity(k) > maxScore:\n maxScore = j.wup_similarity(k)\n firstWord = j\n secondWord = k\n print('兩個詞的語意獲得最高分(語意相近)')\n print('score : {}'.format(maxScore))\n print('firstWord : {}'.format(firstWord))\n print('secondWord : {}'.format(secondWord))\n print('\\n')\n if type(firstWord) == type(''):\n return get_similar_words(list(words[0]))\n else:\n print(firstWord, firstWord.definition())\n print(secondWord, secondWord.definition())\n print('\\n')\n return [firstWord, secondWord]\n else:\n synSetList = []\n for i in range(len(words)):\n labelMean1 = wn.synsets(words[i])\n for j in labelMean1:\n synSetList.append(j)\n return synSetList\n\n\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<function token>\n<function token>\n<code token>\n"
] | false |
872 |
93d0d73d56b04bba505265958fccff229f5eaf49
|
# -*- coding: utf-8 -*-
import os
from flask import Flask, request,render_template,url_for
from flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class
import sys
sys.path.insert(1, 'script')
from backend import model
import io
from PIL import Image
import base64
import numpy as np
app = Flask(__name__)
app.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')
photos = UploadSet('photos', IMAGES)
configure_uploads(app, photos)
patch_request_class(app)
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
file_url = photos.url(filename)
path,label,element = model(file_url)
result = []
for el in path :
img = Image.fromarray((el * 255).astype(np.uint8))
file_object = io.BytesIO()
img.save(file_object, 'jpeg',quality=100)
figdata_jgp = base64.b64encode(file_object.getvalue())
result.append(figdata_jgp.decode('ascii'))
return render_template('display.html',image = file_url,label = element, results=zip(result,label))
return render_template('index.html')
if __name__ == '__main__':
    app.run(threaded=False)
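A hypothetical smoke test for the upload endpoint, assuming the server is running locally on Flask's default port and a sample.jpg sits next to the script (neither is part of the original record):
import requests

# POST a file under the 'photo' field name expected by upload_file().
with open('sample.jpg', 'rb') as f:
    resp = requests.post('http://127.0.0.1:5000/', files={'photo': f})
print(resp.status_code)  # 200 if the model ran and display.html rendered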
|
[
"\n# -*- coding: utf-8 -*-\nimport os\nfrom flask import Flask, request,render_template,url_for\nfrom flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class\nimport sys\nsys.path.insert(1, 'script')\nfrom backend import model\nimport io\nfrom PIL import Image\nimport base64\nimport numpy as np\n\n\n\n\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\n\n\n\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app) \n\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path,label,element = model(file_url)\n result = []\n for el in path :\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg',quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html',image = file_url,label = element, results=zip(result,label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"import os\nfrom flask import Flask, request, render_template, url_for\nfrom flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class\nimport sys\nsys.path.insert(1, 'script')\nfrom backend import model\nimport io\nfrom PIL import Image\nimport base64\nimport numpy as np\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"<import token>\nsys.path.insert(1, 'script')\n<import token>\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"<import token>\nsys.path.insert(1, 'script')\n<import token>\n<assignment token>\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"<import token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\n<code token>\n",
"<import token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<function token>\n<code token>\n"
] | false |
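The upload handler in the record above turns each float array returned by model() into an inline base64 JPEG string, which the display.html template can then presumably embed via a data: URI. A minimal standalone sketch of that conversion step (the helper name array_to_jpeg_b64 and the demo gradient are illustrative assumptions, not part of the record):

import base64
import io

import numpy as np
from PIL import Image


def array_to_jpeg_b64(arr):
    # Encode a float array in [0, 1] as an ASCII base64 JPEG string,
    # mirroring the loop body inside upload_file().
    img = Image.fromarray((arr * 255).astype(np.uint8))
    buf = io.BytesIO()
    img.save(buf, 'jpeg', quality=100)
    return base64.b64encode(buf.getvalue()).decode('ascii')


# A 48x72 RGB gradient stands in for one element of `path`; the resulting
# string can be dropped into <img src="data:image/jpeg;base64,...">.
demo = np.stack([np.tile(np.linspace(0.0, 1.0, 72), (48, 1))] * 3, axis=-1)
print(array_to_jpeg_b64(demo)[:40], '...')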
873 |
ce4ecff2012cfda4a458912713b0330a218fa186
|
from states.state import State
class MoveDigState(State):
#init attributes of state
def __init__(self):
super().__init__("MoveDig", "ScanDig")
self.transitionReady = False
self.digSiteDistance = 0
#implementation for each state: overridden
def run(self, moveInstructions):
print("\n>run() not implemented\n")
#always begin with no transition
self.transitionReady = False
#track distance
#execute move instructions
#when in dig site then
self.transitionReady = True
#implementation for each state: overridden
def transition(self):
return self.transitionReady
|
[
"from states.state import State\n\nclass MoveDigState(State):\n #init attributes of state\n def __init__(self):\n super().__init__(\"MoveDig\", \"ScanDig\")\n self.transitionReady = False\n self.digSiteDistance = 0\n\n #implementation for each state: overridden\n def run(self, moveInstructions):\n print(\"\\n>run() not implemented\\n\")\n \n #always begin with no transition\n self.transitionReady = False\n\n #track distance\n #execute move instructions \n #when in dig site then\n self.transitionReady = True\n\n #implementation for each state: overridden\n def transition(self):\n return self.transitionReady",
"from states.state import State\n\n\nclass MoveDigState(State):\n\n def __init__(self):\n super().__init__('MoveDig', 'ScanDig')\n self.transitionReady = False\n self.digSiteDistance = 0\n\n def run(self, moveInstructions):\n print('\\n>run() not implemented\\n')\n self.transitionReady = False\n self.transitionReady = True\n\n def transition(self):\n return self.transitionReady\n",
"<import token>\n\n\nclass MoveDigState(State):\n\n def __init__(self):\n super().__init__('MoveDig', 'ScanDig')\n self.transitionReady = False\n self.digSiteDistance = 0\n\n def run(self, moveInstructions):\n print('\\n>run() not implemented\\n')\n self.transitionReady = False\n self.transitionReady = True\n\n def transition(self):\n return self.transitionReady\n",
"<import token>\n\n\nclass MoveDigState(State):\n\n def __init__(self):\n super().__init__('MoveDig', 'ScanDig')\n self.transitionReady = False\n self.digSiteDistance = 0\n\n def run(self, moveInstructions):\n print('\\n>run() not implemented\\n')\n self.transitionReady = False\n self.transitionReady = True\n <function token>\n",
"<import token>\n\n\nclass MoveDigState(State):\n\n def __init__(self):\n super().__init__('MoveDig', 'ScanDig')\n self.transitionReady = False\n self.digSiteDistance = 0\n <function token>\n <function token>\n",
"<import token>\n\n\nclass MoveDigState(State):\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
874 |
612535d95e655f2e2d2c58f41b2aa99afa7fbcbc
|
# from the top
# clean up dependencies
from flask import Flask
app = Flask(__name__)
@app.route("/")
def index():
return "<h1>Congratulations, it's a web app!</h1>"
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080, debug=True)
|
[
"# from the top\n# clean up dependencies\n\nfrom flask import Flask\n\napp = Flask(__name__)\n\[email protected](\"/\")\ndef index():\n return \"<h1>Congratulations, it's a web app!</h1>\"\n\n\nif __name__ == \"__main__\":\n app.run(host=\"127.0.0.1\", port=8080, debug=True)\n\n",
"from flask import Flask\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n return \"<h1>Congratulations, it's a web app!</h1>\"\n\n\nif __name__ == '__main__':\n app.run(host='127.0.0.1', port=8080, debug=True)\n",
"<import token>\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n return \"<h1>Congratulations, it's a web app!</h1>\"\n\n\nif __name__ == '__main__':\n app.run(host='127.0.0.1', port=8080, debug=True)\n",
"<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return \"<h1>Congratulations, it's a web app!</h1>\"\n\n\nif __name__ == '__main__':\n app.run(host='127.0.0.1', port=8080, debug=True)\n",
"<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return \"<h1>Congratulations, it's a web app!</h1>\"\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<code token>\n"
] | false |
875 |
2579b0c31c5f7cad361ed317f87cb8b0ffcb0098
|
'''
Created on Feb 21, 2013
@author: dharadarji
'''
def get_row(row_index):
entry = [1]
if row_index == 0:
return entry
tmp = []
for i in range(1, row_index + 2):
tmp = entry
print "i: ", i, "tmp: ", tmp
entry = []
entry.append(1)
for j in range(1, i-1):
print "j: ", j, "tmp[j]: ", tmp[0]
entry.append(tmp[j-1] + tmp[j])
entry.append(1)
print "entry: ", entry
print entry
get_row(3)
|
[
"'''\nCreated on Feb 21, 2013\n\n@author: dharadarji\n'''\n\ndef get_row(row_index):\n entry = [1]\n \n if row_index == 0:\n return entry\n \n tmp = []\n \n for i in range(1, row_index + 2):\n tmp = entry\n print \"i: \", i, \"tmp: \", tmp\n\n entry = []\n entry.append(1)\n \n for j in range(1, i-1):\n print \"j: \", j, \"tmp[j]: \", tmp[0]\n entry.append(tmp[j-1] + tmp[j])\n \n entry.append(1)\n print \"entry: \", entry\n print entry\n \nget_row(3)"
] | true |
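The code in the record above is Python 2 (bare print statements), which is presumably why the row is flagged error = true. A minimal Python 3 sketch of the same Pascal's-triangle row computation, written from scratch rather than taken from the record:

def get_row(row_index):
    # Row `row_index` (0-based) of Pascal's triangle.
    row = [1]
    for _ in range(row_index):
        # Each new row is 1, the pairwise sums of the previous row, then 1.
        row = [1] + [row[j] + row[j + 1] for j in range(len(row) - 1)] + [1]
    return row


print(get_row(3))  # [1, 3, 3, 1]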
876 |
f135d52e4d5e49f96869c4209b84f30ff72f6780
|
import praw
import pickle
import copy
class histogram:
def __init__(self, dictionary=None):
self.frequencies = {}
if dictionary is not None:
self.frequencies = copy.deepcopy(dictionary)
def get_sum(self):
the_sum = 0
for e in self.frequencies:
the_sum += self.frequencies[e]
return the_sum
def add_frequency(self, key, value):
if key in self.frequencies:
self.frequencies[key] += value
else:
self.frequencies[key] = value
def add_by_frequencies(self,frequencies):
for key in frequencies.frequencies:
self.add_frequency(key, frequencies.frequencies[key])
def multiply_frequency(self, key, value):
if key in self.frequencies:
self.frequencies[key] *= value
else:
self.frequencies[key] = 0.0
def multiply_by_frequencies(self, frequencies):
for key in frequencies.frequencies:
self.multiply_frequency(key, frequencies.frequencies[key])
def multiply_by_scalar(self, scalar):
for key in self.frequencies:
self.multiply_frequency(key,scalar)
def divide_frequency(self, key, value):
if key in self.frequencies:
if value != 0:
if self.frequencies[key] == 0:
self.frequencies[key] = 1.0
else:
self.frequencies[key] /= (0.0 + value)
else:
if self.frequencies[key] == 0:
self.frequencies[key] = 1.0
else:
self.frequencies[key] = float('inf')
else:
if value > 0:
self.frequencies[key] = 0.0
else:
self.frequencies[key] = 1.0
def divide_by_frequencies(self, frequencies):
for key in frequencies.frequencies:
self.divide_frequency(key, frequencies.frequencies[key])
class comment:
def __init__(self, comment):
if comment is not None and hasattr(comment,'author') and comment.author is not None and hasattr(comment.author, 'name'):
self.author_name = comment.author.name
else:
self.author_name = ''
self.subreddit = str(comment.subreddit.display_name.strip(' ').lower())
class user:
@staticmethod
def get_histogram(comments, author_name):
total_comments_by_author = 0
the_histogram = histogram()
for comment in comments:
if comment.author_name == author_name:
total_comments_by_author += 1
the_histogram.add_frequency(comment.subreddit, 1)
the_histogram.multiply_by_scalar(1.0 / total_comments_by_author)
#print author_name, " ", the_histogram.get_sum()
return the_histogram.frequencies
class community:
@staticmethod
def get_histogram(comments, subreddit_name):
total_comments_in_subreddit = 0
the_histogram = histogram()
for comment in comments:
if comment.subreddit == subreddit_name:
total_comments_in_subreddit += 1
the_histogram.add_frequency(comment.author_name, 1)
the_histogram.multiply_by_scalar(1.0 / total_comments_in_subreddit)
return the_histogram.frequencies
class data:
def __init__(self, comments, x_subs):
self.comments = comments
self.x_subs = x_subs
def remove_sub_data(subredditName):
the_data = pickle.load(open('data.pkl', 'rb'))
comments = the_data.comments
x_subs = the_data.x_subs
comments = [x for x in comments if x.subreddit.lower() != subredditName]
x_subs = [x for x in x_subs if x != subredditName]
the_data = data(comments, x_subs )
print x_subs
output = open('data.pkl', 'wb')
pickle.dump(the_data,output)
output.close()
def add_sub_data(subredditName, num_redditors):
user_agent = ("Testing Reddit Functionality by /u/Reddit_Projector https://github.com/joshlemer/RedditProject")
reddit = praw.Reddit(user_agent)
subreddit_object = reddit.get_subreddit(subredditName)
the_data = pickle.load(open('data.pkl', 'rb'))
comments = the_data.comments
x_subs = the_data.x_subs
y_comments = [comment(a) for a in subreddit_object.get_comments(limit=num_redditors)]
z_comments = []
redditors = []
i = 0
for y_com in y_comments:
print y_com.subreddit, " z = ", i
redditor = y_com.author_name
if redditor not in redditors:
try:
z_comments += [comment(a) for a in reddit.get_redditor(y_com.author_name).get_comments(limit=100)]
redditors.append(redditor)
except:
print "oops, that user is weird"
i += 1
comments += list(z_comments)
print "COMMENTS LENGTH: ", len(comments)
the_data = data(comments, x_subs + [subredditName] )
output = open('data.pkl', 'wb')
pickle.dump(the_data,output)
output.close()
if __name__ == "__main__":
user_agent = ("Testing Reddit Functionality by /u/Reddit_Projector https://github.com/joshlemer/RedditProject")
reddit = praw.Reddit(user_agent)
subredditName = 'all'
subreddit_object = reddit.get_subreddit(subredditName)
y = 5 #Comments per subreddit inspected
z = 100 #Comments per user inspected
#List of subreddits to be analyzed
# x_subs = [
# 'hiphopheads',
# 'metal',
# 'postrock',
# 'letstalkmusic' ]
#Commented code below is for pulling our x_subs from the most recent comments in /r/all
# x_comments = [comment(a) for a in subreddit_object.get_comments(limit=x)]
# i = 0
# for c in x_comments:
# print "x = ", i
# if c.subreddit not in x_subs:
# x_subs.append(c.subreddit)
# i += 1
#List of subreddits to be analyzed
x_subs = [
'hiphopheads',
'metal',
'postrock',
'letstalkmusic' ]
y_comments = []
i = 0
print "Getting ", y, " comments from each of the ", len(x_subs), " subreddits"
for x_sub in x_subs:
print "\tRetrieving ", 5, " comments from /r/", x_sub
subreddit_object = reddit.get_subreddit(x_sub)
y_comments += [comment(a) for a in subreddit_object.get_comments(limit=y)]
i += 1
z_comments = []
redditors = []
i = 0
print "Following commenters from original subs to gather their other reddit activity"
for y_com in y_comments:
redditor = y_com.author_name
print "\tAnalyzing user ", redditor, " (user ", i, "/", len(y_comments), ")"
if redditor not in redditors:
try:
z_comments += [comment(a) for a in reddit.get_redditor(y_com.author_name).get_comments(limit=z)]
redditors.append(redditor)
except:
print "\t\toops, that user is weird\n\t\tprobably deleted their comment or profile or something"
else:
print "\t\tAlready looked at this user, no need to make an other call."
i += 1
comments = list(z_comments)
print "COMMENTS LENGTH: ", len(comments)
the_data = data(comments, x_subs)
output = open('data.pkl', 'wb')
pickle.dump(the_data,output)
output.close()
|
[
"import praw\nimport pickle\nimport copy\n\nclass histogram:\n def __init__(self, dictionary=None):\n self.frequencies = {}\n if dictionary is not None:\n self.frequencies = copy.deepcopy(dictionary)\n\n def get_sum(self):\n the_sum = 0\n for e in self.frequencies:\n the_sum += self.frequencies[e]\n return the_sum\n\n def add_frequency(self, key, value):\n if key in self.frequencies:\n self.frequencies[key] += value\n else:\n self.frequencies[key] = value\n\n def add_by_frequencies(self,frequencies):\n for key in frequencies.frequencies:\n self.add_frequency(key, frequencies.frequencies[key])\n\n def multiply_frequency(self, key, value):\n if key in self.frequencies:\n self.frequencies[key] *= value\n else:\n self.frequencies[key] = 0.0\n\n def multiply_by_frequencies(self, frequencies):\n for key in frequencies.frequencies:\n self.multiply_frequency(key, frequencies.frequencies[key])\n\n def multiply_by_scalar(self, scalar):\n for key in self.frequencies:\n self.multiply_frequency(key,scalar)\n\n def divide_frequency(self, key, value):\n if key in self.frequencies:\n if value != 0:\n if self.frequencies[key] == 0:\n self.frequencies[key] = 1.0\n else:\n self.frequencies[key] /= (0.0 + value)\n else:\n if self.frequencies[key] == 0:\n self.frequencies[key] = 1.0\n else:\n self.frequencies[key] = float('inf')\n else:\n if value > 0:\n self.frequencies[key] = 0.0\n else:\n self.frequencies[key] = 1.0\n\n def divide_by_frequencies(self, frequencies):\n for key in frequencies.frequencies:\n self.divide_frequency(key, frequencies.frequencies[key])\n\n\nclass comment:\n def __init__(self, comment):\n if comment is not None and hasattr(comment,'author') and comment.author is not None and hasattr(comment.author, 'name'):\n self.author_name = comment.author.name\n else:\n self.author_name = ''\n\n self.subreddit = str(comment.subreddit.display_name.strip(' ').lower())\n\nclass user:\n @staticmethod\n def get_histogram(comments, author_name):\n total_comments_by_author = 0\n the_histogram = histogram()\n for comment in comments:\n if comment.author_name == author_name:\n total_comments_by_author += 1\n the_histogram.add_frequency(comment.subreddit, 1)\n the_histogram.multiply_by_scalar(1.0 / total_comments_by_author)\n #print author_name, \" \", the_histogram.get_sum()\n return the_histogram.frequencies\n\nclass community:\n @staticmethod\n def get_histogram(comments, subreddit_name):\n total_comments_in_subreddit = 0\n the_histogram = histogram()\n for comment in comments:\n if comment.subreddit == subreddit_name:\n total_comments_in_subreddit += 1\n the_histogram.add_frequency(comment.author_name, 1)\n the_histogram.multiply_by_scalar(1.0 / total_comments_in_subreddit)\n return the_histogram.frequencies\n\nclass data:\n def __init__(self, comments, x_subs):\n self.comments = comments\n self.x_subs = x_subs\n\n\ndef remove_sub_data(subredditName):\n the_data = pickle.load(open('data.pkl', 'rb'))\n comments = the_data.comments\n x_subs = the_data.x_subs\n\n comments = [x for x in comments if x.subreddit.lower() != subredditName]\n x_subs = [x for x in x_subs if x != subredditName]\n\n the_data = data(comments, x_subs )\n print x_subs\n output = open('data.pkl', 'wb')\n pickle.dump(the_data,output)\n output.close()\n\n\n\n\ndef add_sub_data(subredditName, num_redditors):\n user_agent = (\"Testing Reddit Functionality by /u/Reddit_Projector https://github.com/joshlemer/RedditProject\")\n reddit = praw.Reddit(user_agent)\n subreddit_object = reddit.get_subreddit(subredditName)\n\n the_data = 
pickle.load(open('data.pkl', 'rb'))\n comments = the_data.comments\n x_subs = the_data.x_subs\n y_comments = [comment(a) for a in subreddit_object.get_comments(limit=num_redditors)]\n\n z_comments = []\n redditors = []\n i = 0\n for y_com in y_comments:\n print y_com.subreddit, \" z = \", i\n redditor = y_com.author_name\n if redditor not in redditors:\n try:\n z_comments += [comment(a) for a in reddit.get_redditor(y_com.author_name).get_comments(limit=100)]\n redditors.append(redditor)\n except:\n print \"oops, that user is weird\"\n i += 1\n\n comments += list(z_comments)\n print \"COMMENTS LENGTH: \", len(comments)\n the_data = data(comments, x_subs + [subredditName] )\n output = open('data.pkl', 'wb')\n pickle.dump(the_data,output)\n output.close()\n\n\n\nif __name__ == \"__main__\":\n user_agent = (\"Testing Reddit Functionality by /u/Reddit_Projector https://github.com/joshlemer/RedditProject\")\n reddit = praw.Reddit(user_agent)\n subredditName = 'all'\n subreddit_object = reddit.get_subreddit(subredditName)\n y = 5 #Comments per subreddit inspected\n z = 100 #Comments per user inspected\n\n\n\n #List of subreddits to be analyzed\n # x_subs = [\n # 'hiphopheads',\n # 'metal',\n # 'postrock',\n # 'letstalkmusic' ]\n\n #Commented code below is for pulling our x_subs from the most recent comments in /r/all\n\n # x_comments = [comment(a) for a in subreddit_object.get_comments(limit=x)]\n # i = 0\n # for c in x_comments:\n # print \"x = \", i\n # if c.subreddit not in x_subs:\n # x_subs.append(c.subreddit)\n # i += 1\n\n #List of subreddits to be analyzed\n x_subs = [\n 'hiphopheads',\n 'metal',\n 'postrock',\n 'letstalkmusic' ]\n\n y_comments = []\n i = 0\n print \"Getting \", y, \" comments from each of the \", len(x_subs), \" subreddits\"\n for x_sub in x_subs:\n print \"\\tRetrieving \", 5, \" comments from /r/\", x_sub\n subreddit_object = reddit.get_subreddit(x_sub)\n y_comments += [comment(a) for a in subreddit_object.get_comments(limit=y)]\n i += 1\n\n z_comments = []\n redditors = []\n i = 0\n print \"Following commenters from original subs to gather their other reddit activity\"\n for y_com in y_comments:\n redditor = y_com.author_name\n print \"\\tAnalyzing user \", redditor, \" (user \", i, \"/\", len(y_comments), \")\"\n if redditor not in redditors:\n try:\n z_comments += [comment(a) for a in reddit.get_redditor(y_com.author_name).get_comments(limit=z)]\n redditors.append(redditor)\n except:\n print \"\\t\\toops, that user is weird\\n\\t\\tprobably deleted their comment or profile or something\"\n else:\n print \"\\t\\tAlready looked at this user, no need to make an other call.\"\n i += 1\n\n comments = list(z_comments)\n print \"COMMENTS LENGTH: \", len(comments)\n the_data = data(comments, x_subs)\n output = open('data.pkl', 'wb')\n pickle.dump(the_data,output)\n output.close()\n"
] | true |
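The histogram class at the top of the record above is the piece the rest of the script leans on for per-user and per-subreddit frequency tables. A short usage sketch, assuming the class as defined in the record (the sample keys and counts are made up):

h = histogram({'python': 2, 'flask': 1})
h.add_frequency('python', 3)      # python -> 5
h.multiply_by_scalar(0.5)         # python -> 2.5, flask -> 0.5
print(h.frequencies)              # {'python': 2.5, 'flask': 0.5}
print(h.get_sum())                # 3.0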
877 |
1c85ccaacfb47808e9e74f2a18bfe3b309891cf4
|
#!/usr/bin/python
import pymysql
dbServerName = "127.0.0.1"
dbUser = "root"
dbPassword = "1448"
dbName = "TestDataBase2"
charSet = "utf8mb4"
cusrorType = pymysql.cursors.DictCursor
connectionObject = pymysql.connect(host=dbServerName, user=dbUser, password=dbPassword,
db=dbName, charset=charSet,cursorclass=cusrorType)
try:
# Create a cursor object
cursorObject = connectionObject.cursor()
# SQL query string
sqlQuery = "CREATE TABLE Liceu(id int, Nume varchar(32), Prenume varchar(32), Legitimatie int)"
# Execute the sqlQuery
cursorObject.execute(sqlQuery)
# SQL query string
sqlQuery = "show tables"
# Execute the sqlQuery
cursorObject.execute(sqlQuery)
#mycursor = mydb.cursor()
sql = "INSERT INTO Liceu(id, Nume, Prenume, Leg) VALUES (%n, %s, %s, %n)"
val = (5, 'Highway 21', 'sfsdfs', 53)
cursorObject.execute(sql, val)
cursorObject.commit()
print(mycursor.rowcount, "record inserted.")
#Fetch all the rows
rows = cursorObject.fetchall()
for row in rows:
print(row)
except Exception as e:
print("Exeception occured:{}".format(e))
finally:
connectionObject.close()
|
[
"#!/usr/bin/python\nimport pymysql\n\n\ndbServerName = \"127.0.0.1\"\n\ndbUser = \"root\"\n\ndbPassword = \"1448\"\n\ndbName = \"TestDataBase2\"\n\ncharSet = \"utf8mb4\"\n\ncusrorType = pymysql.cursors.DictCursor\n\n\n\nconnectionObject = pymysql.connect(host=dbServerName, user=dbUser, password=dbPassword,\n\n db=dbName, charset=charSet,cursorclass=cusrorType)\ntry:\n\n\n\n # Create a cursor object\n\n cursorObject = connectionObject.cursor()\n\n\n\n # SQL query string\n\n sqlQuery = \"CREATE TABLE Liceu(id int, Nume varchar(32), Prenume varchar(32), Legitimatie int)\"\n\n\n\n # Execute the sqlQuery\n\n cursorObject.execute(sqlQuery)\n\n\n\n # SQL query string\n\n sqlQuery = \"show tables\"\n\n\n\n # Execute the sqlQuery\n\n cursorObject.execute(sqlQuery)\n\n#mycursor = mydb.cursor()\n\nsql = \"INSERT INTO Liceu(id, Nume, Prenume, Leg) VALUES (%n, %s, %s, %n)\"\nval = (5, 'Highway 21', 'sfsdfs', 53)\ncursorObject.execute(sql, val)\n\ncursorObject.commit()\n\nprint(mycursor.rowcount, \"record inserted.\")\n\n\n\n #Fetch all the rows\n\n rows = cursorObject.fetchall()\n\n\n\n for row in rows:\n\n print(row)\n\nexcept Exception as e:\n\n print(\"Exeception occured:{}\".format(e))\n\n\nfinally:\n\n connectionObject.close()\n"
] | true |
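The record above carries error = true, and its code has several independent problems: the indentation collapses after the second execute(), the INSERT uses %n placeholders (PyMySQL only understands %s) and a column name (Leg) that the CREATE TABLE never defined, and commit() is called on the cursor instead of the connection. A cleaned-up sketch of the intended flow (connection parameters and table columns copied from the record, everything else our own wording):

import pymysql

connection = pymysql.connect(host='127.0.0.1', user='root', password='1448',
                             db='TestDataBase2', charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)
try:
    with connection.cursor() as cursor:
        # PyMySQL uses %s for every parameter, whatever the column type.
        sql = ('INSERT INTO Liceu(id, Nume, Prenume, Legitimatie) '
               'VALUES (%s, %s, %s, %s)')
        cursor.execute(sql, (5, 'Highway 21', 'sfsdfs', 53))
    connection.commit()  # commit() lives on the connection, not the cursor
    with connection.cursor() as cursor:
        cursor.execute('SELECT * FROM Liceu')
        for row in cursor.fetchall():
            print(row)
finally:
    connection.close()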
878 |
33b8baf2ca819315eaa5f16c7986390acb4d6efd
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
import urllib
def normalize_mac_address(address):
return address.lower().replace("-", ":")
def urlencode(s):
return urllib.quote(s.encode("utf-8"), "")
def urlencode_plus(s):
return urllib.quote_plus(s.encode("utf-8"), "")
|
[
"# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import, division, unicode_literals\n\nimport urllib\n\n\ndef normalize_mac_address(address):\n return address.lower().replace(\"-\", \":\")\n\n\ndef urlencode(s):\n return urllib.quote(s.encode(\"utf-8\"), \"\")\n\n\ndef urlencode_plus(s):\n return urllib.quote_plus(s.encode(\"utf-8\"), \"\")\n",
"from __future__ import absolute_import, division, unicode_literals\nimport urllib\n\n\ndef normalize_mac_address(address):\n return address.lower().replace('-', ':')\n\n\ndef urlencode(s):\n return urllib.quote(s.encode('utf-8'), '')\n\n\ndef urlencode_plus(s):\n return urllib.quote_plus(s.encode('utf-8'), '')\n",
"<import token>\n\n\ndef normalize_mac_address(address):\n return address.lower().replace('-', ':')\n\n\ndef urlencode(s):\n return urllib.quote(s.encode('utf-8'), '')\n\n\ndef urlencode_plus(s):\n return urllib.quote_plus(s.encode('utf-8'), '')\n",
"<import token>\n\n\ndef normalize_mac_address(address):\n return address.lower().replace('-', ':')\n\n\n<function token>\n\n\ndef urlencode_plus(s):\n return urllib.quote_plus(s.encode('utf-8'), '')\n",
"<import token>\n\n\ndef normalize_mac_address(address):\n return address.lower().replace('-', ':')\n\n\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n"
] | false |
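The three helpers above target the Python 2 urllib API; the record encodes to UTF-8 first because Python 2's urllib.quote works on byte strings. Under Python 3 the same functions live in urllib.parse and accept str directly; a sketch of an equivalent port (our own, not part of the record):

from urllib.parse import quote, quote_plus


def normalize_mac_address(address):
    return address.lower().replace('-', ':')


def urlencode(s):
    # safe='' so that '/' is percent-encoded too, matching the original's intent.
    return quote(s, safe='')


def urlencode_plus(s):
    return quote_plus(s, safe='')


print(normalize_mac_address('AA-BB-CC-DD-EE-FF'))  # aa:bb:cc:dd:ee:ff
print(urlencode('a b/c'))                          # a%20b%2Fc
print(urlencode_plus('a b/c'))                     # a+b%2Fc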
879 |
bf8ffe603b7c1e90deed6a69500ea5b7671e7270
|
# from suiron.core.SuironIO import SuironIO
# import cv2
# import os
# import time
# import json
# import numpy as np
# suironio = SuironIO(serial_location='/dev/ttyUSB0', baudrate=57600, port=5050)
# if __name__ == "__main__":
# while True:
# # suironio.record_inputs()
# print('turn90')
# suironio.servo_test(90)
# print('turn0')
# suironio.servo_test(0)
# print('turn-90')
# suironio.servo_test(-90)
# import socket
# import struct
# import pandas as pd
# sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# host = raw_input("Server hostname or ip? ")
# port = input("Server port? ")
# # sock.connect((host,port))
# sock.connect(('192.168.0.164',5051))
# while True:
# data = raw_input("message: ")
# # sock.send(data)
# raw_data = {
# 'image': [2,4,2,5,6,3,2,3],
# 'servo': [22,42,5,45,34,534,2,3],
# 'motor': [23423,324,32,324,324,2,4,2]
# }
# df = pd.DataFrame(raw_data, columns=['image', 'servo', 'motor'])
# df = df.to_csv()
# sock.sendall(struct.pack('>i', len(df))+df)
# # sock.sendall(struct.pack('>i', len(data))+data)
# print("response: ", sock.recv(1024))
import numpy as np
import cv2
import pandas as pd
from suiron.utils.functions import raw_to_cnn, cnn_to_raw, raw_motor_to_rgb
from suiron.utils.img_serializer import deserialize_image
# Visualize images
# With and without any predictions
def visualize_data(filename, width=72, height=48, depth=3, cnn_model=None):
"""
When cnn_model is specified it'll show what the cnn_model predicts (red)
as opposed to what inputs it actually received (green)
"""
data = pd.DataFrame.from_csv(filename)
for i in range(30):
cur_img = data['image'][i]
cur_steer = int(data['servo'][i])
cur_throttle = int(data['motor'][i])
# [1:-1] is used to remove '[' and ']' from string
cur_img_array = deserialize_image(cur_img)
# cur_img_array = cv2.resize(cur_img_array, (480, 320), interpolation=cv2.INTER_CUBIC)
image = cv2.cvtColor(cur_img_array, cv2.COLOR_RGB2BGR)
print(i)
cv2.imwrite('test'+str(i)+'.jpg', image)
import sys
import json
# from suiron.core.SuironVZ import visualize_data
from suiron.utils.file_finder import get_latest_filename
# Load image settings
with open('settings.json') as d:
SETTINGS = json.load(d)
# Visualize latest filename
filename = get_latest_filename()
# If we specified which file
if len(sys.argv) > 1:
filename = sys.argv[1]
visualize_data(filename, width=SETTINGS['width'], height=SETTINGS['height'], depth=SETTINGS['depth'])
|
[
"# from suiron.core.SuironIO import SuironIO\n# import cv2\n# import os\n# import time\n# import json\n# import numpy as np\n\n# suironio = SuironIO(serial_location='/dev/ttyUSB0', baudrate=57600, port=5050)\n\n# if __name__ == \"__main__\":\n# while True:\n# \t# suironio.record_inputs()\n# \tprint('turn90')\n# suironio.servo_test(90)\n# print('turn0')\n# suironio.servo_test(0)\n# print('turn-90')\n# suironio.servo_test(-90)\n\n# import socket\n# import struct\n# import pandas as pd\n\n# sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n# host = raw_input(\"Server hostname or ip? \")\n# port = input(\"Server port? \")\n# # sock.connect((host,port))\n# sock.connect(('192.168.0.164',5051))\n\n# while True:\n# data = raw_input(\"message: \")\n# # sock.send(data)\n# raw_data = {\n# \t 'image': [2,4,2,5,6,3,2,3], \n# \t 'servo': [22,42,5,45,34,534,2,3],\n# \t 'motor': [23423,324,32,324,324,2,4,2]\n# \t }\n# df = pd.DataFrame(raw_data, columns=['image', 'servo', 'motor'])\n# df = df.to_csv()\n# sock.sendall(struct.pack('>i', len(df))+df)\n# # sock.sendall(struct.pack('>i', len(data))+data)\n# print(\"response: \", sock.recv(1024))\n\nimport numpy as np\nimport cv2\nimport pandas as pd\n\nfrom suiron.utils.functions import raw_to_cnn, cnn_to_raw, raw_motor_to_rgb\nfrom suiron.utils.img_serializer import deserialize_image\n\n# Visualize images\n# With and without any predictions\ndef visualize_data(filename, width=72, height=48, depth=3, cnn_model=None):\n \"\"\"\n When cnn_model is specified it'll show what the cnn_model predicts (red)\n as opposed to what inputs it actually received (green)\n \"\"\"\n data = pd.DataFrame.from_csv(filename) \n\n for i in range(30):\n cur_img = data['image'][i]\n cur_steer = int(data['servo'][i])\n cur_throttle = int(data['motor'][i])\n \n # [1:-1] is used to remove '[' and ']' from string \n cur_img_array = deserialize_image(cur_img)\n # cur_img_array = cv2.resize(cur_img_array, (480, 320), interpolation=cv2.INTER_CUBIC)\n image = cv2.cvtColor(cur_img_array, cv2.COLOR_RGB2BGR)\n print(i)\n cv2.imwrite('test'+str(i)+'.jpg', image)\n\nimport sys\nimport json\n\n# from suiron.core.SuironVZ import visualize_data\nfrom suiron.utils.file_finder import get_latest_filename\n\n# Load image settings\nwith open('settings.json') as d:\n SETTINGS = json.load(d)\n\n# Visualize latest filename\nfilename = get_latest_filename() \n\n# If we specified which file\nif len(sys.argv) > 1:\n filename = sys.argv[1]\n\nvisualize_data(filename, width=SETTINGS['width'], height=SETTINGS['height'], depth=SETTINGS['depth'])",
"import numpy as np\nimport cv2\nimport pandas as pd\nfrom suiron.utils.functions import raw_to_cnn, cnn_to_raw, raw_motor_to_rgb\nfrom suiron.utils.img_serializer import deserialize_image\n\n\ndef visualize_data(filename, width=72, height=48, depth=3, cnn_model=None):\n \"\"\"\n When cnn_model is specified it'll show what the cnn_model predicts (red)\n as opposed to what inputs it actually received (green)\n \"\"\"\n data = pd.DataFrame.from_csv(filename)\n for i in range(30):\n cur_img = data['image'][i]\n cur_steer = int(data['servo'][i])\n cur_throttle = int(data['motor'][i])\n cur_img_array = deserialize_image(cur_img)\n image = cv2.cvtColor(cur_img_array, cv2.COLOR_RGB2BGR)\n print(i)\n cv2.imwrite('test' + str(i) + '.jpg', image)\n\n\nimport sys\nimport json\nfrom suiron.utils.file_finder import get_latest_filename\nwith open('settings.json') as d:\n SETTINGS = json.load(d)\nfilename = get_latest_filename()\nif len(sys.argv) > 1:\n filename = sys.argv[1]\nvisualize_data(filename, width=SETTINGS['width'], height=SETTINGS['height'],\n depth=SETTINGS['depth'])\n",
"<import token>\n\n\ndef visualize_data(filename, width=72, height=48, depth=3, cnn_model=None):\n \"\"\"\n When cnn_model is specified it'll show what the cnn_model predicts (red)\n as opposed to what inputs it actually received (green)\n \"\"\"\n data = pd.DataFrame.from_csv(filename)\n for i in range(30):\n cur_img = data['image'][i]\n cur_steer = int(data['servo'][i])\n cur_throttle = int(data['motor'][i])\n cur_img_array = deserialize_image(cur_img)\n image = cv2.cvtColor(cur_img_array, cv2.COLOR_RGB2BGR)\n print(i)\n cv2.imwrite('test' + str(i) + '.jpg', image)\n\n\n<import token>\nwith open('settings.json') as d:\n SETTINGS = json.load(d)\nfilename = get_latest_filename()\nif len(sys.argv) > 1:\n filename = sys.argv[1]\nvisualize_data(filename, width=SETTINGS['width'], height=SETTINGS['height'],\n depth=SETTINGS['depth'])\n",
"<import token>\n\n\ndef visualize_data(filename, width=72, height=48, depth=3, cnn_model=None):\n \"\"\"\n When cnn_model is specified it'll show what the cnn_model predicts (red)\n as opposed to what inputs it actually received (green)\n \"\"\"\n data = pd.DataFrame.from_csv(filename)\n for i in range(30):\n cur_img = data['image'][i]\n cur_steer = int(data['servo'][i])\n cur_throttle = int(data['motor'][i])\n cur_img_array = deserialize_image(cur_img)\n image = cv2.cvtColor(cur_img_array, cv2.COLOR_RGB2BGR)\n print(i)\n cv2.imwrite('test' + str(i) + '.jpg', image)\n\n\n<import token>\nwith open('settings.json') as d:\n SETTINGS = json.load(d)\n<assignment token>\nif len(sys.argv) > 1:\n filename = sys.argv[1]\nvisualize_data(filename, width=SETTINGS['width'], height=SETTINGS['height'],\n depth=SETTINGS['depth'])\n",
"<import token>\n\n\ndef visualize_data(filename, width=72, height=48, depth=3, cnn_model=None):\n \"\"\"\n When cnn_model is specified it'll show what the cnn_model predicts (red)\n as opposed to what inputs it actually received (green)\n \"\"\"\n data = pd.DataFrame.from_csv(filename)\n for i in range(30):\n cur_img = data['image'][i]\n cur_steer = int(data['servo'][i])\n cur_throttle = int(data['motor'][i])\n cur_img_array = deserialize_image(cur_img)\n image = cv2.cvtColor(cur_img_array, cv2.COLOR_RGB2BGR)\n print(i)\n cv2.imwrite('test' + str(i) + '.jpg', image)\n\n\n<import token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
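visualize_data in the record above reads the recording with pd.DataFrame.from_csv, a method newer pandas releases no longer provide (it was deprecated and later removed in favour of pd.read_csv). A tiny runnable sketch of the replacement call, with index_col=0 mirroring from_csv's default of using the first column as the index (the CSV content below is invented, only the call pattern matters):

import io

import pandas as pd

csv_text = 'idx,image,servo,motor\n0,<img-bytes>,90,1500\n1,<img-bytes>,85,1520\n'
data = pd.read_csv(io.StringIO(csv_text), index_col=0)  # was: pd.DataFrame.from_csv(...)
print(int(data['servo'][0]), int(data['motor'][1]))     # 90 1520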
880 |
71ebc6e9218085e887eda7843b5489837ed45c97
|
import re
class Zout:
def __init__(self, aline):
self.Str = aline
self.Var = ''
self.StN = ''
self.ZN = ''
self.ZName = ''
self.Motion = ''
self.Ztype = ''
self.tozout(aline)
def tozout(self, aline):
"""transform station statement to Cylinder Outputs struct"""
# SetAusg(A120,5,A.St201_Y1_2_SwivelUnit_backward);
#front|back|up|down|left|right
pattern = re.compile(r'.*(?P<Var>A.*[sS]t(?P<StN>\d+)_Y(?P<ZN>\d+)_[24]_(?P<ZName>\w+)_'
r'(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\s*\).*')
match = pattern.match(aline)
if match:
#print('match')
self.Var = match.group('Var')
self.StN = match.group('StN')
self.ZN = match.group('ZN')
self.ZName = match.group('ZName')
self.Motion = match.group('Motion')
# if re.compile(r'^up|down|left|right$').match(self.Motion):
# self.Motion = self.Motion+'ward'
# obj = re.compile(r'up|down|left|right')
# if obj.match(self.Motion):
# print('match')
# self.Motion = obj.subn('ward',self.Motion)[0]
self.Motion = re.sub(r'^(up|down|left|right)$',r'\1ward', self.Motion)
isgrippermatch = re.compile(r'.*(open|close).*').match(aline)
if isgrippermatch:
self.Ztype = 'gripper'
else:
self.Ztype = 'not gripper'
def display(self):
print(self.Var)
class Zouts:
def __init__(self):
self.elements = []
def search(self, StN, ZN, Motion):
for elem in self.elements:
print('elem:')
print(str(type(elem.StN)) + str(type(StN)))
print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))
print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))
if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:
print('match')
return elem
print('not match')
return None
def add(self, zout):
self.elements.append(zout)
def display(self):
for elem in self.elements:
print(elem.Var)
|
[
"import re\r\n\r\nclass Zout:\r\n def __init__(self, aline):\r\n self.Str = aline\r\n self.Var = ''\r\n self.StN = ''\r\n self.ZN = ''\r\n self.ZName = ''\r\n self.Motion = ''\r\n self.Ztype = ''\r\n self.tozout(aline)\r\n\r\n def tozout(self, aline):\r\n \"\"\"transform station statement to Cylinder Outputs struct\"\"\"\r\n # SetAusg(A120,5,A.St201_Y1_2_SwivelUnit_backward);\r\n #front|back|up|down|left|right\r\n pattern = re.compile(r'.*(?P<Var>A.*[sS]t(?P<StN>\\d+)_Y(?P<ZN>\\d+)_[24]_(?P<ZName>\\w+)_'\r\n r'(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\s*\\).*')\r\n match = pattern.match(aline)\r\n if match:\r\n #print('match')\r\n self.Var = match.group('Var')\r\n self.StN = match.group('StN')\r\n self.ZN = match.group('ZN')\r\n self.ZName = match.group('ZName')\r\n self.Motion = match.group('Motion')\r\n # if re.compile(r'^up|down|left|right$').match(self.Motion):\r\n # self.Motion = self.Motion+'ward'\r\n # obj = re.compile(r'up|down|left|right')\r\n # if obj.match(self.Motion):\r\n # print('match')\r\n # self.Motion = obj.subn('ward',self.Motion)[0]\r\n self.Motion = re.sub(r'^(up|down|left|right)$',r'\\1ward', self.Motion)\r\n isgrippermatch = re.compile(r'.*(open|close).*').match(aline)\r\n if isgrippermatch:\r\n self.Ztype = 'gripper'\r\n else:\r\n self.Ztype = 'not gripper'\r\n\r\n def display(self):\r\n print(self.Var)\r\n\r\nclass Zouts:\r\n def __init__(self):\r\n self.elements = []\r\n\r\n def search(self, StN, ZN, Motion):\r\n for elem in self.elements:\r\n print('elem:')\r\n print(str(type(elem.StN)) + str(type(StN)))\r\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\r\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\r\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\r\n print('match')\r\n return elem\r\n print('not match')\r\n return None\r\n\r\n def add(self, zout):\r\n self.elements.append(zout)\r\n\r\n def display(self):\r\n for elem in self.elements:\r\n print(elem.Var)",
"import re\n\n\nclass Zout:\n\n def __init__(self, aline):\n self.Str = aline\n self.Var = ''\n self.StN = ''\n self.ZN = ''\n self.ZName = ''\n self.Motion = ''\n self.Ztype = ''\n self.tozout(aline)\n\n def tozout(self, aline):\n \"\"\"transform station statement to Cylinder Outputs struct\"\"\"\n pattern = re.compile(\n '.*(?P<Var>A.*[sS]t(?P<StN>\\\\d+)_Y(?P<ZN>\\\\d+)_[24]_(?P<ZName>\\\\w+)_(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\\\s*\\\\).*'\n )\n match = pattern.match(aline)\n if match:\n self.Var = match.group('Var')\n self.StN = match.group('StN')\n self.ZN = match.group('ZN')\n self.ZName = match.group('ZName')\n self.Motion = match.group('Motion')\n self.Motion = re.sub('^(up|down|left|right)$', '\\\\1ward', self.\n Motion)\n isgrippermatch = re.compile('.*(open|close).*').match(aline)\n if isgrippermatch:\n self.Ztype = 'gripper'\n else:\n self.Ztype = 'not gripper'\n\n def display(self):\n print(self.Var)\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n\n\nclass Zout:\n\n def __init__(self, aline):\n self.Str = aline\n self.Var = ''\n self.StN = ''\n self.ZN = ''\n self.ZName = ''\n self.Motion = ''\n self.Ztype = ''\n self.tozout(aline)\n\n def tozout(self, aline):\n \"\"\"transform station statement to Cylinder Outputs struct\"\"\"\n pattern = re.compile(\n '.*(?P<Var>A.*[sS]t(?P<StN>\\\\d+)_Y(?P<ZN>\\\\d+)_[24]_(?P<ZName>\\\\w+)_(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\\\s*\\\\).*'\n )\n match = pattern.match(aline)\n if match:\n self.Var = match.group('Var')\n self.StN = match.group('StN')\n self.ZN = match.group('ZN')\n self.ZName = match.group('ZName')\n self.Motion = match.group('Motion')\n self.Motion = re.sub('^(up|down|left|right)$', '\\\\1ward', self.\n Motion)\n isgrippermatch = re.compile('.*(open|close).*').match(aline)\n if isgrippermatch:\n self.Ztype = 'gripper'\n else:\n self.Ztype = 'not gripper'\n\n def display(self):\n print(self.Var)\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n\n\nclass Zout:\n\n def __init__(self, aline):\n self.Str = aline\n self.Var = ''\n self.StN = ''\n self.ZN = ''\n self.ZName = ''\n self.Motion = ''\n self.Ztype = ''\n self.tozout(aline)\n <function token>\n\n def display(self):\n print(self.Var)\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n\n\nclass Zout:\n <function token>\n <function token>\n\n def display(self):\n print(self.Var)\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n\n\nclass Zout:\n <function token>\n <function token>\n <function token>\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n<class token>\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n<class token>\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n <function token>\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n<class token>\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n <function token>\n <function token>\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n<class token>\n\n\nclass Zouts:\n <function token>\n <function token>\n <function token>\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"<import token>\n<class token>\n\n\nclass Zouts:\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n"
] | false |
881 |
58f7810e2731721562e3459f92684589dc66862c
|
a = [3, 4, 2, 3, 5, 8, 23, 32, 35, 34, 4, 6, 9]
print("")
print("Lesson #2")
print("Program start:")
for i in a:
if i < 9:
print(i)
print("End")
|
[
"a = [3, 4, 2, 3, 5, 8, 23, 32, 35, 34, 4, 6, 9]\n\nprint(\"\")\nprint(\"Lesson #2\")\nprint(\"Program start:\")\nfor i in a:\n if i < 9:\n print(i)\nprint(\"End\")",
"a = [3, 4, 2, 3, 5, 8, 23, 32, 35, 34, 4, 6, 9]\nprint('')\nprint('Lesson #2')\nprint('Program start:')\nfor i in a:\n if i < 9:\n print(i)\nprint('End')\n",
"<assignment token>\nprint('')\nprint('Lesson #2')\nprint('Program start:')\nfor i in a:\n if i < 9:\n print(i)\nprint('End')\n",
"<assignment token>\n<code token>\n"
] | false |
882 |
aaa0ac5e31e2c10b5baba6077e952fff1a92ef82
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 22 18:05:44 2018
@author: Administrator
"""
from sklearn.model_selection import cross_val_score, train_test_split
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import GridSearchCV
iris = load_iris()
log_reg = LogisticRegression()
score = cross_val_score(log_reg, iris.data, iris.target,cv=10)
print("cross-vali score is: {}".format(score.mean()))
import mglearn
#mglearn.plots.plot_stratified_cross_validation()
kfold = StratifiedKFold(n_splits=5, shuffle=True)
for train_index, test_index in kfold.split(iris.data, iris.target):
print(train_index, test_index)
from sklearn.svm import SVC
def simple_grid(iris, kfold):
X_train,X_test, y_train, y_test = train_test_split(
iris.data, iris.target, test_size=0.3,random_state = 0)
best_score = 0
para_list = [0.001, 0.01, 0.1, 1, 10]
for gamma in para_list:
for C in para_list:
svm = SVC(gamma=gamma, C=C)
#svm.fit(X_train, y_train)
scores = cross_val_score(svm, iris.data, iris.target,cv=kfold)
score = scores.mean()
if score > best_score:
best_score = score
best_para = {'C':C, 'gamma':gamma}
print("best score is {:.2f}".format(best_score))
print("best parameters is {}".format(best_para))
score = cross_val_score(svm, iris.data, iris.target,cv=kfold)
print("CV-score is {}".format(score.mean(0)))
return best_para
para = simple_grid(iris, kfold)
para_grid = {"C":[0.001, 0.01, 0.1, 1, 10],
'gamma':[0.001, 0.01, 0.1, 1, 10]}
grid_search = GridSearchCV(SVC(), para_grid, cv = kfold)
X_train,X_test, y_train, y_test = train_test_split(
iris.data, iris.target, test_size=0.3,random_state = 0)
grid_search.fit(X_train, y_train)
print("best grid score is {:.2f}".format(grid_search.score(X_test,
y_test)))
import pandas as pd
results = pd.DataFrame(grid_search.cv_results_)
display(results.head())
print(cross_val_score(GridSearchCV(SVC(), para_grid, cv = kfold),
X_train,y_train, cv = kfold).mean())
y_pred = grid_search.predict(X_test,y_test)
from sklearn.metrics import classification_report
print(classification_report(y_test, y_pred))
|
[
"# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Aug 22 18:05:44 2018\n\n@author: Administrator\n\"\"\"\n\nfrom sklearn.model_selection import cross_val_score, train_test_split\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.model_selection import StratifiedKFold\nfrom sklearn.model_selection import GridSearchCV\n\niris = load_iris()\nlog_reg = LogisticRegression()\n\nscore = cross_val_score(log_reg, iris.data, iris.target,cv=10)\nprint(\"cross-vali score is: {}\".format(score.mean()))\n\nimport mglearn\n#mglearn.plots.plot_stratified_cross_validation()\n\nkfold = StratifiedKFold(n_splits=5, shuffle=True)\nfor train_index, test_index in kfold.split(iris.data, iris.target):\n print(train_index, test_index)\n \nfrom sklearn.svm import SVC\n\ndef simple_grid(iris, kfold):\n X_train,X_test, y_train, y_test = train_test_split(\n iris.data, iris.target, test_size=0.3,random_state = 0)\n best_score = 0\n para_list = [0.001, 0.01, 0.1, 1, 10]\n for gamma in para_list:\n for C in para_list:\n svm = SVC(gamma=gamma, C=C)\n #svm.fit(X_train, y_train)\n scores = cross_val_score(svm, iris.data, iris.target,cv=kfold)\n score = scores.mean()\n \n if score > best_score:\n best_score = score\n best_para = {'C':C, 'gamma':gamma}\n print(\"best score is {:.2f}\".format(best_score))\n print(\"best parameters is {}\".format(best_para))\n score = cross_val_score(svm, iris.data, iris.target,cv=kfold)\n \n print(\"CV-score is {}\".format(score.mean(0)))\n return best_para\n\npara = simple_grid(iris, kfold)\n\npara_grid = {\"C\":[0.001, 0.01, 0.1, 1, 10],\n 'gamma':[0.001, 0.01, 0.1, 1, 10]}\ngrid_search = GridSearchCV(SVC(), para_grid, cv = kfold)\nX_train,X_test, y_train, y_test = train_test_split(\n iris.data, iris.target, test_size=0.3,random_state = 0)\n\ngrid_search.fit(X_train, y_train)\nprint(\"best grid score is {:.2f}\".format(grid_search.score(X_test,\n y_test)))\n\nimport pandas as pd\nresults = pd.DataFrame(grid_search.cv_results_)\ndisplay(results.head())\n\nprint(cross_val_score(GridSearchCV(SVC(), para_grid, cv = kfold),\n X_train,y_train, cv = kfold).mean())\ny_pred = grid_search.predict(X_test,y_test)\n\nfrom sklearn.metrics import classification_report\nprint(classification_report(y_test, y_pred))",
"<docstring token>\nfrom sklearn.model_selection import cross_val_score, train_test_split\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.model_selection import StratifiedKFold\nfrom sklearn.model_selection import GridSearchCV\niris = load_iris()\nlog_reg = LogisticRegression()\nscore = cross_val_score(log_reg, iris.data, iris.target, cv=10)\nprint('cross-vali score is: {}'.format(score.mean()))\nimport mglearn\nkfold = StratifiedKFold(n_splits=5, shuffle=True)\nfor train_index, test_index in kfold.split(iris.data, iris.target):\n print(train_index, test_index)\nfrom sklearn.svm import SVC\n\n\ndef simple_grid(iris, kfold):\n X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.\n target, test_size=0.3, random_state=0)\n best_score = 0\n para_list = [0.001, 0.01, 0.1, 1, 10]\n for gamma in para_list:\n for C in para_list:\n svm = SVC(gamma=gamma, C=C)\n scores = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n score = scores.mean()\n if score > best_score:\n best_score = score\n best_para = {'C': C, 'gamma': gamma}\n print('best score is {:.2f}'.format(best_score))\n print('best parameters is {}'.format(best_para))\n score = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n print('CV-score is {}'.format(score.mean(0)))\n return best_para\n\n\npara = simple_grid(iris, kfold)\npara_grid = {'C': [0.001, 0.01, 0.1, 1, 10], 'gamma': [0.001, 0.01, 0.1, 1, 10]\n }\ngrid_search = GridSearchCV(SVC(), para_grid, cv=kfold)\nX_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target,\n test_size=0.3, random_state=0)\ngrid_search.fit(X_train, y_train)\nprint('best grid score is {:.2f}'.format(grid_search.score(X_test, y_test)))\nimport pandas as pd\nresults = pd.DataFrame(grid_search.cv_results_)\ndisplay(results.head())\nprint(cross_val_score(GridSearchCV(SVC(), para_grid, cv=kfold), X_train,\n y_train, cv=kfold).mean())\ny_pred = grid_search.predict(X_test, y_test)\nfrom sklearn.metrics import classification_report\nprint(classification_report(y_test, y_pred))\n",
"<docstring token>\n<import token>\niris = load_iris()\nlog_reg = LogisticRegression()\nscore = cross_val_score(log_reg, iris.data, iris.target, cv=10)\nprint('cross-vali score is: {}'.format(score.mean()))\n<import token>\nkfold = StratifiedKFold(n_splits=5, shuffle=True)\nfor train_index, test_index in kfold.split(iris.data, iris.target):\n print(train_index, test_index)\n<import token>\n\n\ndef simple_grid(iris, kfold):\n X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.\n target, test_size=0.3, random_state=0)\n best_score = 0\n para_list = [0.001, 0.01, 0.1, 1, 10]\n for gamma in para_list:\n for C in para_list:\n svm = SVC(gamma=gamma, C=C)\n scores = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n score = scores.mean()\n if score > best_score:\n best_score = score\n best_para = {'C': C, 'gamma': gamma}\n print('best score is {:.2f}'.format(best_score))\n print('best parameters is {}'.format(best_para))\n score = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n print('CV-score is {}'.format(score.mean(0)))\n return best_para\n\n\npara = simple_grid(iris, kfold)\npara_grid = {'C': [0.001, 0.01, 0.1, 1, 10], 'gamma': [0.001, 0.01, 0.1, 1, 10]\n }\ngrid_search = GridSearchCV(SVC(), para_grid, cv=kfold)\nX_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target,\n test_size=0.3, random_state=0)\ngrid_search.fit(X_train, y_train)\nprint('best grid score is {:.2f}'.format(grid_search.score(X_test, y_test)))\n<import token>\nresults = pd.DataFrame(grid_search.cv_results_)\ndisplay(results.head())\nprint(cross_val_score(GridSearchCV(SVC(), para_grid, cv=kfold), X_train,\n y_train, cv=kfold).mean())\ny_pred = grid_search.predict(X_test, y_test)\n<import token>\nprint(classification_report(y_test, y_pred))\n",
"<docstring token>\n<import token>\n<assignment token>\nprint('cross-vali score is: {}'.format(score.mean()))\n<import token>\n<assignment token>\nfor train_index, test_index in kfold.split(iris.data, iris.target):\n print(train_index, test_index)\n<import token>\n\n\ndef simple_grid(iris, kfold):\n X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.\n target, test_size=0.3, random_state=0)\n best_score = 0\n para_list = [0.001, 0.01, 0.1, 1, 10]\n for gamma in para_list:\n for C in para_list:\n svm = SVC(gamma=gamma, C=C)\n scores = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n score = scores.mean()\n if score > best_score:\n best_score = score\n best_para = {'C': C, 'gamma': gamma}\n print('best score is {:.2f}'.format(best_score))\n print('best parameters is {}'.format(best_para))\n score = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n print('CV-score is {}'.format(score.mean(0)))\n return best_para\n\n\n<assignment token>\ngrid_search.fit(X_train, y_train)\nprint('best grid score is {:.2f}'.format(grid_search.score(X_test, y_test)))\n<import token>\n<assignment token>\ndisplay(results.head())\nprint(cross_val_score(GridSearchCV(SVC(), para_grid, cv=kfold), X_train,\n y_train, cv=kfold).mean())\n<assignment token>\n<import token>\nprint(classification_report(y_test, y_pred))\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n\n\ndef simple_grid(iris, kfold):\n X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.\n target, test_size=0.3, random_state=0)\n best_score = 0\n para_list = [0.001, 0.01, 0.1, 1, 10]\n for gamma in para_list:\n for C in para_list:\n svm = SVC(gamma=gamma, C=C)\n scores = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n score = scores.mean()\n if score > best_score:\n best_score = score\n best_para = {'C': C, 'gamma': gamma}\n print('best score is {:.2f}'.format(best_score))\n print('best parameters is {}'.format(best_para))\n score = cross_val_score(svm, iris.data, iris.target, cv=kfold)\n print('CV-score is {}'.format(score.mean(0)))\n return best_para\n\n\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<function token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<code token>\n"
] | false |
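The grid-search record above ends with two calls that cannot run as written: display() exists only inside IPython/Jupyter, and grid_search.predict(X_test, y_test) hands the labels to predict, which takes features only. A self-contained sketch of the intended ending (our own rewrite; the added random_state values are assumptions for reproducibility):

import pandas as pd
from sklearn.datasets import load_iris
from sklearn.metrics import classification_report
from sklearn.model_selection import GridSearchCV, StratifiedKFold, train_test_split
from sklearn.svm import SVC

iris = load_iris()
X_train, X_test, y_train, y_test = train_test_split(
    iris.data, iris.target, test_size=0.3, random_state=0)
kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=0)
para_grid = {'C': [0.001, 0.01, 0.1, 1, 10], 'gamma': [0.001, 0.01, 0.1, 1, 10]}

grid_search = GridSearchCV(SVC(), para_grid, cv=kfold)
grid_search.fit(X_train, y_train)
print('best parameters:', grid_search.best_params_)
print('test-set score: {:.2f}'.format(grid_search.score(X_test, y_test)))

# Outside a notebook, cv_results_ can simply be printed.
print(pd.DataFrame(grid_search.cv_results_).head())

y_pred = grid_search.predict(X_test)  # predict() takes features only
print(classification_report(y_test, y_pred))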
883 |
ad3c5ed3d6a9aa83e69f53d3fec845e8e2b1c9c6
|
import pandas as pd
import numpy as np
import sys
def avg (x):
return [sum(x[i])/row for i in range(col)]
def sd (x):
return [np.std(x[i]) for i in range(col)]
def cov (x, md_x):
cov_xy=[[0 for r in range(col)] for c in range(col)]
for i in range(col):
for j in range (col):
for k in range (row):
cov_xy[i][j]+=((data[i][k]-md_x[i])*(data[j][k]-md_x[j]))/(row)
return(cov_xy)
def cor (cov, sd_x):
cor_xy=[[0 for r in range(col)] for c in range(col)]
for i in range(col):
for j in range (col):
cor_xy[i][j] = cov[i][j]/(sd_x[i]*sd_x[j])
print("cov= ",cov[i][j],"sd i", sd_x[i], " sd k", sd_x[j],"cov/sd", cov[i][j]/(sd_x[i]*sd_x[j]))
return(cor_xy)
if __name__ == "__main__":
argv=sys.argv[:]
if len(argv)<2:
print("1 argument required. Provide data file name")
sys.exit(0)
data=pd.read_csv(argv[1],header= None)
row=data.shape[0]
col=data.shape[1]
print("** dataset dimensions **")
print(row)
print(col)
mean=avg(data)
stdev=sd(data)
print(stdev)
covar=cov(data, mean)
correl=cor(covar, stdev)
print("---------CORRELATION MATRIX---------")
print(correl)
|
[
"import pandas as pd\nimport numpy as np\nimport sys\n\ndef avg (x):\n return [sum(x[i])/row for i in range(col)]\n\ndef sd (x):\n return [np.std(x[i]) for i in range(col)]\n\ndef cov (x, md_x):\n cov_xy=[[0 for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range (col):\n for k in range (row):\n cov_xy[i][j]+=((data[i][k]-md_x[i])*(data[j][k]-md_x[j]))/(row)\n return(cov_xy)\n\ndef cor (cov, sd_x):\n cor_xy=[[0 for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range (col):\n cor_xy[i][j] = cov[i][j]/(sd_x[i]*sd_x[j])\n print(\"cov= \",cov[i][j],\"sd i\", sd_x[i], \" sd k\", sd_x[j],\"cov/sd\", cov[i][j]/(sd_x[i]*sd_x[j]))\n return(cor_xy)\n\n\nif __name__ == \"__main__\":\n \n argv=sys.argv[:]\n \n if len(argv)<2:\n print(\"1 argument required. Provide data file name\")\n sys.exit(0)\n \n data=pd.read_csv(argv[1],header= None)\n row=data.shape[0]\n col=data.shape[1]\n print(\"** dataset dimensions **\")\n print(row)\n print(col)\n mean=avg(data)\n stdev=sd(data)\n print(stdev)\n \n covar=cov(data, mean)\n correl=cor(covar, stdev)\n print(\"---------CORRELATION MATRIX---------\")\n print(correl)\n \n\n",
"import pandas as pd\nimport numpy as np\nimport sys\n\n\ndef avg(x):\n return [(sum(x[i]) / row) for i in range(col)]\n\n\ndef sd(x):\n return [np.std(x[i]) for i in range(col)]\n\n\ndef cov(x, md_x):\n cov_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n for k in range(row):\n cov_xy[i][j] += (data[i][k] - md_x[i]) * (data[j][k] - md_x[j]\n ) / row\n return cov_xy\n\n\ndef cor(cov, sd_x):\n cor_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n cor_xy[i][j] = cov[i][j] / (sd_x[i] * sd_x[j])\n print('cov= ', cov[i][j], 'sd i', sd_x[i], ' sd k', sd_x[j],\n 'cov/sd', cov[i][j] / (sd_x[i] * sd_x[j]))\n return cor_xy\n\n\nif __name__ == '__main__':\n argv = sys.argv[:]\n if len(argv) < 2:\n print('1 argument required. Provide data file name')\n sys.exit(0)\n data = pd.read_csv(argv[1], header=None)\n row = data.shape[0]\n col = data.shape[1]\n print('** dataset dimensions **')\n print(row)\n print(col)\n mean = avg(data)\n stdev = sd(data)\n print(stdev)\n covar = cov(data, mean)\n correl = cor(covar, stdev)\n print('---------CORRELATION MATRIX---------')\n print(correl)\n",
"<import token>\n\n\ndef avg(x):\n return [(sum(x[i]) / row) for i in range(col)]\n\n\ndef sd(x):\n return [np.std(x[i]) for i in range(col)]\n\n\ndef cov(x, md_x):\n cov_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n for k in range(row):\n cov_xy[i][j] += (data[i][k] - md_x[i]) * (data[j][k] - md_x[j]\n ) / row\n return cov_xy\n\n\ndef cor(cov, sd_x):\n cor_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n cor_xy[i][j] = cov[i][j] / (sd_x[i] * sd_x[j])\n print('cov= ', cov[i][j], 'sd i', sd_x[i], ' sd k', sd_x[j],\n 'cov/sd', cov[i][j] / (sd_x[i] * sd_x[j]))\n return cor_xy\n\n\nif __name__ == '__main__':\n argv = sys.argv[:]\n if len(argv) < 2:\n print('1 argument required. Provide data file name')\n sys.exit(0)\n data = pd.read_csv(argv[1], header=None)\n row = data.shape[0]\n col = data.shape[1]\n print('** dataset dimensions **')\n print(row)\n print(col)\n mean = avg(data)\n stdev = sd(data)\n print(stdev)\n covar = cov(data, mean)\n correl = cor(covar, stdev)\n print('---------CORRELATION MATRIX---------')\n print(correl)\n",
"<import token>\n\n\ndef avg(x):\n return [(sum(x[i]) / row) for i in range(col)]\n\n\ndef sd(x):\n return [np.std(x[i]) for i in range(col)]\n\n\ndef cov(x, md_x):\n cov_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n for k in range(row):\n cov_xy[i][j] += (data[i][k] - md_x[i]) * (data[j][k] - md_x[j]\n ) / row\n return cov_xy\n\n\ndef cor(cov, sd_x):\n cor_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n cor_xy[i][j] = cov[i][j] / (sd_x[i] * sd_x[j])\n print('cov= ', cov[i][j], 'sd i', sd_x[i], ' sd k', sd_x[j],\n 'cov/sd', cov[i][j] / (sd_x[i] * sd_x[j]))\n return cor_xy\n\n\n<code token>\n",
"<import token>\n\n\ndef avg(x):\n return [(sum(x[i]) / row) for i in range(col)]\n\n\ndef sd(x):\n return [np.std(x[i]) for i in range(col)]\n\n\ndef cov(x, md_x):\n cov_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n for k in range(row):\n cov_xy[i][j] += (data[i][k] - md_x[i]) * (data[j][k] - md_x[j]\n ) / row\n return cov_xy\n\n\n<function token>\n<code token>\n",
"<import token>\n\n\ndef avg(x):\n return [(sum(x[i]) / row) for i in range(col)]\n\n\n<function token>\n\n\ndef cov(x, md_x):\n cov_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n for k in range(row):\n cov_xy[i][j] += (data[i][k] - md_x[i]) * (data[j][k] - md_x[j]\n ) / row\n return cov_xy\n\n\n<function token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n\n\ndef cov(x, md_x):\n cov_xy = [[(0) for r in range(col)] for c in range(col)]\n for i in range(col):\n for j in range(col):\n for k in range(row):\n cov_xy[i][j] += (data[i][k] - md_x[i]) * (data[j][k] - md_x[j]\n ) / row\n return cov_xy\n\n\n<function token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
884 |
d267c8cbe51fb1bacc9404a1385f1daa4a0db7f2
|
import pandas as pd
import numpy as np
import math
from sklearn.datasets import load_digits, load_iris, load_boston, load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.metrics import pairwise_distances
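# Minimal k-means clustering: random centroid initialisation followed by Lloyd's
# iterations (assign each point to its nearest centroid, then recompute centroids).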
class KMeans():
def __init__(self, k = 5, max_iters = 100, random_seed = 42):
self.k = k
self.max_iters = max_iters
# Set random seed
np.random.seed(random_seed)
def _initialise_centroids(self, X):
random_indices = np.random.permutation(X.shape[0])
random_indices = random_indices[:self.k]
self.centroids = X[random_indices]
    def _euclidean_distance(self, x):
        # squared Euclidean distance from one sample to every centroid (helper; not used by fit/predict)
        return np.sum((x - self.centroids)**2, axis = 1)
def _assign_clusters(self, X):
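        # Label every sample with the index of its nearest centroid (Euclidean distance)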
cluster_distances = pairwise_distances(X, self.centroids, metric = 'euclidean')
cluster_labels = np.argmin(cluster_distances, axis = 1)
return cluster_labels
    def _update_centroids(self, X, cluster_labels):
        for cluster in range(self.k):
            # Get all data points of a cluster
            X_cluster = X[cluster_labels == cluster]
            # Keep the previous centroid if the cluster is empty (np.mean of an empty slice is NaN)
            if len(X_cluster) == 0:
                continue
            # Update the cluster's centroid
            cluster_mean = np.mean(X_cluster, axis = 0)
            self.centroids[cluster] = cluster_mean
def fit(self, X):
# Initialise random centroids
self._initialise_centroids(X)
iterations = 0
while iterations <= self.max_iters:
iterations += 1
# Assign clusters to data
cluster_labels = self._assign_clusters(X)
# Update centroids
self._update_centroids(X, cluster_labels)
def predict(self, X):
return self._assign_clusters(X)
# Load data
data = load_breast_cancer()
X, y = data.data, data.target
X_train, X_test = train_test_split(X, test_size = 0.1)
# Fit model
model = KMeans(k = 5)
model.fit(X_train)
# Predict
y_pred = model.predict(X_test)
print(y_pred)
|
[
"import pandas as pd\nimport numpy as np\nimport math\nfrom sklearn.datasets import load_digits, load_iris, load_boston, load_breast_cancer\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import pairwise_distances\n\n\nclass KMeans():\n\n def __init__(self, k = 5, max_iters = 100, random_seed = 42):\n self.k = k\n self.max_iters = max_iters\n\n # Set random seed\n np.random.seed(random_seed)\n\n def _initialise_centroids(self, X):\n random_indices = np.random.permutation(X.shape[0])\n random_indices = random_indices[:self.k]\n self.centroids = X[random_indices]\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids)**2, axis = 1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric = 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis = 1)\n return cluster_labels\n\n def _update_centroids(self, X, cluster_labels):\n for cluster in range(self.k):\n\n # Get all data points of a cluster\n X_cluster = X[cluster_labels == cluster]\n\n # Update the cluster's centroid\n cluster_mean = np.mean(X_cluster, axis = 0)\n self.centroids[cluster] = cluster_mean\n\n def fit(self, X):\n\n # Initialise random centroids\n self._initialise_centroids(X)\n\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n\n # Assign clusters to data\n cluster_labels = self._assign_clusters(X)\n\n # Update centroids\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\n# Load data\ndata = load_breast_cancer()\nX, y = data.data, data.target\nX_train, X_test = train_test_split(X, test_size = 0.1)\n\n# Fit model\nmodel = KMeans(k = 5)\nmodel.fit(X_train)\n\n# Predict\ny_pred = model.predict(X_test)\nprint(y_pred)\n",
"import pandas as pd\nimport numpy as np\nimport math\nfrom sklearn.datasets import load_digits, load_iris, load_boston, load_breast_cancer\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import pairwise_distances\n\n\nclass KMeans:\n\n def __init__(self, k=5, max_iters=100, random_seed=42):\n self.k = k\n self.max_iters = max_iters\n np.random.seed(random_seed)\n\n def _initialise_centroids(self, X):\n random_indices = np.random.permutation(X.shape[0])\n random_indices = random_indices[:self.k]\n self.centroids = X[random_indices]\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n\n def _update_centroids(self, X, cluster_labels):\n for cluster in range(self.k):\n X_cluster = X[cluster_labels == cluster]\n cluster_mean = np.mean(X_cluster, axis=0)\n self.centroids[cluster] = cluster_mean\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\ndata = load_breast_cancer()\nX, y = data.data, data.target\nX_train, X_test = train_test_split(X, test_size=0.1)\nmodel = KMeans(k=5)\nmodel.fit(X_train)\ny_pred = model.predict(X_test)\nprint(y_pred)\n",
"<import token>\n\n\nclass KMeans:\n\n def __init__(self, k=5, max_iters=100, random_seed=42):\n self.k = k\n self.max_iters = max_iters\n np.random.seed(random_seed)\n\n def _initialise_centroids(self, X):\n random_indices = np.random.permutation(X.shape[0])\n random_indices = random_indices[:self.k]\n self.centroids = X[random_indices]\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n\n def _update_centroids(self, X, cluster_labels):\n for cluster in range(self.k):\n X_cluster = X[cluster_labels == cluster]\n cluster_mean = np.mean(X_cluster, axis=0)\n self.centroids[cluster] = cluster_mean\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\ndata = load_breast_cancer()\nX, y = data.data, data.target\nX_train, X_test = train_test_split(X, test_size=0.1)\nmodel = KMeans(k=5)\nmodel.fit(X_train)\ny_pred = model.predict(X_test)\nprint(y_pred)\n",
"<import token>\n\n\nclass KMeans:\n\n def __init__(self, k=5, max_iters=100, random_seed=42):\n self.k = k\n self.max_iters = max_iters\n np.random.seed(random_seed)\n\n def _initialise_centroids(self, X):\n random_indices = np.random.permutation(X.shape[0])\n random_indices = random_indices[:self.k]\n self.centroids = X[random_indices]\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n\n def _update_centroids(self, X, cluster_labels):\n for cluster in range(self.k):\n X_cluster = X[cluster_labels == cluster]\n cluster_mean = np.mean(X_cluster, axis=0)\n self.centroids[cluster] = cluster_mean\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\n<assignment token>\nmodel.fit(X_train)\n<assignment token>\nprint(y_pred)\n",
"<import token>\n\n\nclass KMeans:\n\n def __init__(self, k=5, max_iters=100, random_seed=42):\n self.k = k\n self.max_iters = max_iters\n np.random.seed(random_seed)\n\n def _initialise_centroids(self, X):\n random_indices = np.random.permutation(X.shape[0])\n random_indices = random_indices[:self.k]\n self.centroids = X[random_indices]\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n\n def _update_centroids(self, X, cluster_labels):\n for cluster in range(self.k):\n X_cluster = X[cluster_labels == cluster]\n cluster_mean = np.mean(X_cluster, axis=0)\n self.centroids[cluster] = cluster_mean\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass KMeans:\n <function token>\n\n def _initialise_centroids(self, X):\n random_indices = np.random.permutation(X.shape[0])\n random_indices = random_indices[:self.k]\n self.centroids = X[random_indices]\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n\n def _update_centroids(self, X, cluster_labels):\n for cluster in range(self.k):\n X_cluster = X[cluster_labels == cluster]\n cluster_mean = np.mean(X_cluster, axis=0)\n self.centroids[cluster] = cluster_mean\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass KMeans:\n <function token>\n\n def _initialise_centroids(self, X):\n random_indices = np.random.permutation(X.shape[0])\n random_indices = random_indices[:self.k]\n self.centroids = X[random_indices]\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n <function token>\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass KMeans:\n <function token>\n <function token>\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n <function token>\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n\n def predict(self, X):\n return self._assign_clusters(X)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass KMeans:\n <function token>\n <function token>\n\n def _euclidien_distance(self, x):\n return np.sum((x - self.centroids) ** 2, axis=1)\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n <function token>\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n <function token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass KMeans:\n <function token>\n <function token>\n <function token>\n\n def _assign_clusters(self, X):\n cluster_distances = pairwise_distances(X, self.centroids, metric=\n 'euclidean')\n cluster_labels = np.argmin(cluster_distances, axis=1)\n return cluster_labels\n <function token>\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n <function token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass KMeans:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def fit(self, X):\n self._initialise_centroids(X)\n iterations = 0\n while iterations <= self.max_iters:\n iterations += 1\n cluster_labels = self._assign_clusters(X)\n self._update_centroids(X, cluster_labels)\n <function token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass KMeans:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<class token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
885 |
9189c1dd21b0858df3138bcf4fc7568b378e6271
|
import os
import unittest
from mock import Mock
from tfsnippet.utils import *
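# Unit tests for tfsnippet.utils helpers: humanize_duration, camel_to_underscore,
# NOT_SET, cached_property / clear_cached_property, maybe_close and iter_files.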
class HumanizeDurationTestCase(unittest.TestCase):
cases = [
(0.0, '0 sec'),
(1e-8, '1e-08 sec'),
(0.1, '0.1 sec'),
(1.0, '1 sec'),
(1, '1 sec'),
(1.1, '1.1 secs'),
(59, '59 secs'),
(59.9, '59.9 secs'),
(60, '1 min'),
(61, '1 min 1 sec'),
(62, '1 min 2 secs'),
(119, '1 min 59 secs'),
(120, '2 mins'),
(121, '2 mins 1 sec'),
(122, '2 mins 2 secs'),
(3599, '59 mins 59 secs'),
(3600, '1 hr'),
(3601, '1 hr 1 sec'),
(3661, '1 hr 1 min 1 sec'),
(86399, '23 hrs 59 mins 59 secs'),
(86400, '1 day'),
(86401, '1 day 1 sec'),
(172799, '1 day 23 hrs 59 mins 59 secs'),
(259199, '2 days 23 hrs 59 mins 59 secs'),
]
def test_positive(self):
for seconds, answer in self.cases:
result = humanize_duration(seconds)
self.assertEqual(
result, answer,
                msg='humanize_duration({!r}) is expected to be {!r}, '
'but got {!r}.'.format(seconds, answer, result)
)
def test_negative(self):
for seconds, answer in self.cases[1:]:
seconds = -seconds
answer = answer + ' ago'
result = humanize_duration(seconds)
self.assertEqual(
result, answer,
                msg='humanize_duration({!r}) is expected to be {!r}, '
'but got {!r}.'.format(seconds, answer, result)
)
class CamelToUnderscoreTestCase(unittest.TestCase):
def assert_convert(self, camel, underscore):
self.assertEqual(
camel_to_underscore(camel),
underscore,
msg='{!r} should be converted to {!r}'.format(camel, underscore)
)
def test_camel_to_underscore(self):
examples = [
('simpleTest', 'simple_test'),
('easy', 'easy'),
('HTML', 'html'),
('simpleXML', 'simple_xml'),
('PDFLoad', 'pdf_load'),
('startMIDDLELast', 'start_middle_last'),
('AString', 'a_string'),
('Some4Numbers234', 'some4_numbers234'),
('TEST123String', 'test123_string'),
]
for camel, underscore in examples:
self.assert_convert(camel, underscore)
self.assert_convert(underscore, underscore)
self.assert_convert('_{}_'.format(camel),
'_{}_'.format(underscore))
self.assert_convert('_{}_'.format(underscore),
'_{}_'.format(underscore))
self.assert_convert('__{}__'.format(camel),
'__{}__'.format(underscore))
self.assert_convert('__{}__'.format(underscore),
'__{}__'.format(underscore))
self.assert_convert(
'_'.join([s.capitalize() for s in underscore.split('_')]),
underscore
)
self.assert_convert(
'_'.join([s.upper() for s in underscore.split('_')]),
underscore
)
class NotSetTestCase(unittest.TestCase):
def test_repr(self):
self.assertEqual(repr(NOT_SET), 'NOT_SET')
class _CachedPropertyHelper(object):
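    # `cached_value` is computed once and memoised on the `_cached_value` attribute.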
def __init__(self, value):
self.value = value
@cached_property('_cached_value')
def cached_value(self):
return self.value
class CachedPropertyTestCase(unittest.TestCase):
def test_cached_property(self):
o = _CachedPropertyHelper(0)
self.assertFalse(hasattr(o, '_cached_value'))
o.value = 123
self.assertEqual(o.cached_value, 123)
self.assertTrue(hasattr(o, '_cached_value'))
self.assertEqual(o._cached_value, 123)
o.value = 456
self.assertEqual(o.cached_value, 123)
self.assertEqual(o._cached_value, 123)
def test_clear_cached_property(self):
o = _CachedPropertyHelper(123)
_ = o.cached_value
clear_cached_property(o, '_cached_value')
o.value = 456
self.assertFalse(hasattr(o, '_cached_value'))
self.assertEqual(o.cached_value, 456)
self.assertEqual(o._cached_value, 456)
class MaybeCloseTestCase(unittest.TestCase):
def test_maybe_close(self):
# test having `close()`
f = Mock(close=Mock(return_value=None))
with maybe_close(f):
self.assertFalse(f.close.called)
self.assertTrue(f.close.called)
# test having not `close()`
with maybe_close(1):
pass
class IterFilesTestCase(unittest.TestCase):
def test_iter_files(self):
names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt',
'b/1.txt', 'b/2.txt', 'c.txt']
with TemporaryDirectory() as tempdir:
for name in names:
f_path = os.path.join(tempdir, name)
f_dir = os.path.split(f_path)[0]
makedirs(f_dir, exist_ok=True)
with open(f_path, 'wb') as f:
f.write(b'')
self.assertListEqual(names, sorted(iter_files(tempdir)))
self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))
if __name__ == '__main__':
unittest.main()
|
[
"import os\nimport unittest\n\nfrom mock import Mock\n\nfrom tfsnippet.utils import *\n\n\nclass HumanizeDurationTestCase(unittest.TestCase):\n cases = [\n (0.0, '0 sec'),\n (1e-8, '1e-08 sec'),\n (0.1, '0.1 sec'),\n (1.0, '1 sec'),\n (1, '1 sec'),\n (1.1, '1.1 secs'),\n (59, '59 secs'),\n (59.9, '59.9 secs'),\n (60, '1 min'),\n (61, '1 min 1 sec'),\n (62, '1 min 2 secs'),\n (119, '1 min 59 secs'),\n (120, '2 mins'),\n (121, '2 mins 1 sec'),\n (122, '2 mins 2 secs'),\n (3599, '59 mins 59 secs'),\n (3600, '1 hr'),\n (3601, '1 hr 1 sec'),\n (3661, '1 hr 1 min 1 sec'),\n (86399, '23 hrs 59 mins 59 secs'),\n (86400, '1 day'),\n (86401, '1 day 1 sec'),\n (172799, '1 day 23 hrs 59 mins 59 secs'),\n (259199, '2 days 23 hrs 59 mins 59 secs'),\n ]\n\n def test_positive(self):\n for seconds, answer in self.cases:\n result = humanize_duration(seconds)\n self.assertEqual(\n result, answer,\n msg='humanize_duraion({!r}) is expected to be {!r}, '\n 'but got {!r}.'.format(seconds, answer, result)\n )\n\n def test_negative(self):\n for seconds, answer in self.cases[1:]:\n seconds = -seconds\n answer = answer + ' ago'\n result = humanize_duration(seconds)\n self.assertEqual(\n result, answer,\n msg='humanize_duraion({!r}) is expected to be {!r}, '\n 'but got {!r}.'.format(seconds, answer, result)\n )\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n def assert_convert(self, camel, underscore):\n self.assertEqual(\n camel_to_underscore(camel),\n underscore,\n msg='{!r} should be converted to {!r}'.format(camel, underscore)\n )\n\n def test_camel_to_underscore(self):\n examples = [\n ('simpleTest', 'simple_test'),\n ('easy', 'easy'),\n ('HTML', 'html'),\n ('simpleXML', 'simple_xml'),\n ('PDFLoad', 'pdf_load'),\n ('startMIDDLELast', 'start_middle_last'),\n ('AString', 'a_string'),\n ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string'),\n ]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel),\n '_{}_'.format(underscore))\n self.assert_convert('_{}_'.format(underscore),\n '_{}_'.format(underscore))\n self.assert_convert('__{}__'.format(camel),\n '__{}__'.format(underscore))\n self.assert_convert('__{}__'.format(underscore),\n '__{}__'.format(underscore))\n self.assert_convert(\n '_'.join([s.capitalize() for s in underscore.split('_')]),\n underscore\n )\n self.assert_convert(\n '_'.join([s.upper() for s in underscore.split('_')]),\n underscore\n )\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass 
MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n # test having `close()`\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n\n # test having not `close()`\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt',\n 'b/1.txt', 'b/2.txt', 'c.txt']\n\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"import os\nimport unittest\nfrom mock import Mock\nfrom tfsnippet.utils import *\n\n\nclass HumanizeDurationTestCase(unittest.TestCase):\n cases = [(0.0, '0 sec'), (1e-08, '1e-08 sec'), (0.1, '0.1 sec'), (1.0,\n '1 sec'), (1, '1 sec'), (1.1, '1.1 secs'), (59, '59 secs'), (59.9,\n '59.9 secs'), (60, '1 min'), (61, '1 min 1 sec'), (62,\n '1 min 2 secs'), (119, '1 min 59 secs'), (120, '2 mins'), (121,\n '2 mins 1 sec'), (122, '2 mins 2 secs'), (3599, '59 mins 59 secs'),\n (3600, '1 hr'), (3601, '1 hr 1 sec'), (3661, '1 hr 1 min 1 sec'), (\n 86399, '23 hrs 59 mins 59 secs'), (86400, '1 day'), (86401,\n '1 day 1 sec'), (172799, '1 day 23 hrs 59 mins 59 secs'), (259199,\n '2 days 23 hrs 59 mins 59 secs')]\n\n def test_positive(self):\n for seconds, answer in self.cases:\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n def test_negative(self):\n for seconds, answer in self.cases[1:]:\n seconds = -seconds\n answer = answer + ' ago'\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n\n def assert_convert(self, camel, underscore):\n self.assertEqual(camel_to_underscore(camel), underscore, msg=\n '{!r} should be converted to {!r}'.format(camel, underscore))\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n 
with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"<import token>\n\n\nclass HumanizeDurationTestCase(unittest.TestCase):\n cases = [(0.0, '0 sec'), (1e-08, '1e-08 sec'), (0.1, '0.1 sec'), (1.0,\n '1 sec'), (1, '1 sec'), (1.1, '1.1 secs'), (59, '59 secs'), (59.9,\n '59.9 secs'), (60, '1 min'), (61, '1 min 1 sec'), (62,\n '1 min 2 secs'), (119, '1 min 59 secs'), (120, '2 mins'), (121,\n '2 mins 1 sec'), (122, '2 mins 2 secs'), (3599, '59 mins 59 secs'),\n (3600, '1 hr'), (3601, '1 hr 1 sec'), (3661, '1 hr 1 min 1 sec'), (\n 86399, '23 hrs 59 mins 59 secs'), (86400, '1 day'), (86401,\n '1 day 1 sec'), (172799, '1 day 23 hrs 59 mins 59 secs'), (259199,\n '2 days 23 hrs 59 mins 59 secs')]\n\n def test_positive(self):\n for seconds, answer in self.cases:\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n def test_negative(self):\n for seconds, answer in self.cases[1:]:\n seconds = -seconds\n answer = answer + ' ago'\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n\n def assert_convert(self, camel, underscore):\n self.assertEqual(camel_to_underscore(camel), underscore, msg=\n '{!r} should be converted to {!r}'.format(camel, underscore))\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n 
self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"<import token>\n\n\nclass HumanizeDurationTestCase(unittest.TestCase):\n cases = [(0.0, '0 sec'), (1e-08, '1e-08 sec'), (0.1, '0.1 sec'), (1.0,\n '1 sec'), (1, '1 sec'), (1.1, '1.1 secs'), (59, '59 secs'), (59.9,\n '59.9 secs'), (60, '1 min'), (61, '1 min 1 sec'), (62,\n '1 min 2 secs'), (119, '1 min 59 secs'), (120, '2 mins'), (121,\n '2 mins 1 sec'), (122, '2 mins 2 secs'), (3599, '59 mins 59 secs'),\n (3600, '1 hr'), (3601, '1 hr 1 sec'), (3661, '1 hr 1 min 1 sec'), (\n 86399, '23 hrs 59 mins 59 secs'), (86400, '1 day'), (86401,\n '1 day 1 sec'), (172799, '1 day 23 hrs 59 mins 59 secs'), (259199,\n '2 days 23 hrs 59 mins 59 secs')]\n\n def test_positive(self):\n for seconds, answer in self.cases:\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n def test_negative(self):\n for seconds, answer in self.cases[1:]:\n seconds = -seconds\n answer = answer + ' ago'\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n\n def assert_convert(self, camel, underscore):\n self.assertEqual(camel_to_underscore(camel), underscore, msg=\n '{!r} should be converted to {!r}'.format(camel, underscore))\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n 
self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n\n\nclass HumanizeDurationTestCase(unittest.TestCase):\n <assignment token>\n\n def test_positive(self):\n for seconds, answer in self.cases:\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n def test_negative(self):\n for seconds, answer in self.cases[1:]:\n seconds = -seconds\n answer = answer + ' ago'\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n\n def assert_convert(self, camel, underscore):\n self.assertEqual(camel_to_underscore(camel), underscore, msg=\n '{!r} should be converted to {!r}'.format(camel, underscore))\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, 
sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n\n\nclass HumanizeDurationTestCase(unittest.TestCase):\n <assignment token>\n\n def test_positive(self):\n for seconds, answer in self.cases:\n result = humanize_duration(seconds)\n self.assertEqual(result, answer, msg=\n 'humanize_duraion({!r}) is expected to be {!r}, but got {!r}.'\n .format(seconds, answer, result))\n <function token>\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n\n def assert_convert(self, camel, underscore):\n self.assertEqual(camel_to_underscore(camel), underscore, msg=\n '{!r} should be converted to {!r}'.format(camel, underscore))\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n\n\nclass HumanizeDurationTestCase(unittest.TestCase):\n <assignment token>\n <function token>\n <function token>\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n\n def assert_convert(self, camel, underscore):\n self.assertEqual(camel_to_underscore(camel), underscore, msg=\n '{!r} should be converted to {!r}'.format(camel, underscore))\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n\n def assert_convert(self, camel, underscore):\n self.assertEqual(camel_to_underscore(camel), underscore, msg=\n '{!r} should be converted to {!r}'.format(camel, underscore))\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n <function token>\n\n def test_camel_to_underscore(self):\n examples = [('simpleTest', 'simple_test'), ('easy', 'easy'), (\n 'HTML', 'html'), ('simpleXML', 'simple_xml'), ('PDFLoad',\n 'pdf_load'), ('startMIDDLELast', 'start_middle_last'), (\n 'AString', 'a_string'), ('Some4Numbers234', 'some4_numbers234'),\n ('TEST123String', 'test123_string')]\n for camel, underscore in examples:\n self.assert_convert(camel, underscore)\n self.assert_convert(underscore, underscore)\n self.assert_convert('_{}_'.format(camel), '_{}_'.format(underscore)\n )\n self.assert_convert('_{}_'.format(underscore), '_{}_'.format(\n underscore))\n self.assert_convert('__{}__'.format(camel), '__{}__'.format(\n underscore))\n self.assert_convert('__{}__'.format(underscore), '__{}__'.\n format(underscore))\n self.assert_convert('_'.join([s.capitalize() for s in\n underscore.split('_')]), underscore)\n self.assert_convert('_'.join([s.upper() for s in underscore.\n split('_')]), underscore)\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n\n\nclass CamelToUnderscoreTestCase(unittest.TestCase):\n <function token>\n <function token>\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass NotSetTestCase(unittest.TestCase):\n\n def test_repr(self):\n self.assertEqual(repr(NOT_SET), 'NOT_SET')\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass NotSetTestCase(unittest.TestCase):\n <function token>\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass _CachedPropertyHelper(object):\n\n def __init__(self, value):\n self.value = value\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass _CachedPropertyHelper(object):\n <function token>\n\n @cached_property('_cached_value')\n def cached_value(self):\n return self.value\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass _CachedPropertyHelper(object):\n <function token>\n <function token>\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n\n def test_cached_property(self):\n o = _CachedPropertyHelper(0)\n self.assertFalse(hasattr(o, '_cached_value'))\n o.value = 123\n self.assertEqual(o.cached_value, 123)\n self.assertTrue(hasattr(o, '_cached_value'))\n self.assertEqual(o._cached_value, 123)\n o.value = 456\n self.assertEqual(o.cached_value, 123)\n self.assertEqual(o._cached_value, 123)\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n <function token>\n\n def test_clear_cached_property(self):\n o = _CachedPropertyHelper(123)\n _ = o.cached_value\n clear_cached_property(o, '_cached_value')\n o.value = 456\n self.assertFalse(hasattr(o, '_cached_value'))\n self.assertEqual(o.cached_value, 456)\n self.assertEqual(o._cached_value, 456)\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CachedPropertyTestCase(unittest.TestCase):\n <function token>\n <function token>\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n\n def test_maybe_close(self):\n f = Mock(close=Mock(return_value=None))\n with maybe_close(f):\n self.assertFalse(f.close.called)\n self.assertTrue(f.close.called)\n with maybe_close(1):\n pass\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass MaybeCloseTestCase(unittest.TestCase):\n <function token>\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass IterFilesTestCase(unittest.TestCase):\n\n def test_iter_files(self):\n names = ['a/1.txt', 'a/2.txt', 'a/b/1.txt', 'a/b/2.txt', 'b/1.txt',\n 'b/2.txt', 'c.txt']\n with TemporaryDirectory() as tempdir:\n for name in names:\n f_path = os.path.join(tempdir, name)\n f_dir = os.path.split(f_path)[0]\n makedirs(f_dir, exist_ok=True)\n with open(f_path, 'wb') as f:\n f.write(b'')\n self.assertListEqual(names, sorted(iter_files(tempdir)))\n self.assertListEqual(names, sorted(iter_files(tempdir + '/a/../')))\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass IterFilesTestCase(unittest.TestCase):\n <function token>\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<code token>\n"
] | false |
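
The step strings in each record shrink the same module one construct at a time, swapping whole top-level statements for placeholders such as <import token>, <class token>, <function token> and <code token>. A minimal sketch of that kind of masking, assuming it is driven by the AST of the source (the helper name and the masking order are illustrative, not taken from this dataset):

import ast

def mask_top_level(source):
    # Replace each top-level import or class with a placeholder token,
    # keeping every other top-level statement verbatim.
    tree = ast.parse(source)
    lines = source.splitlines()
    out = []
    for node in tree.body:
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            out.append('<import token>')
        elif isinstance(node, ast.ClassDef):
            out.append('<class token>')
        else:
            out.extend(lines[node.lineno - 1:node.end_lineno])
    return '\n'.join(out) + '\n'

print(mask_top_level("import os\n\nclass Foo:\n    pass\n\nprint('hi')\n"))
# -> <import token>
#    <class token>
#    print('hi')
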
886 |
feed412278d9e711e49ef209ece0876c1de4a873
|
# -*- coding: UTF-8 -*-
# File name: ukWorkingDays
# Created by JKChang
# 29/07/2020, 11:20
# Tag:
# Description:

from datetime import date,timedelta,datetime
from workalendar.europe import UnitedKingdom

cal = UnitedKingdom()
print(cal.holidays(2020))

def workingDate(start,end):
    cal = UnitedKingdom()
    res = []
    delta = end - start
    for i in range(delta.days +1):
        day = start + timedelta(days=i)
        if cal.is_working_day(day) or day.weekday() < 5:
            res.append(day)
        else:
            pass
    return res

start = datetime.today()
end = datetime(2020, 12, 23)
r = workingDate(start,end)
for d in r:
    print(d.strftime('%d-%B-%Y'))
    print('\n'*3)
|
[
"# -*- coding: UTF-8 -*-\n# File name: ukWorkingDays\n# Created by JKChang\n# 29/07/2020, 11:20\n# Tag:\n# Description:\n\nfrom datetime import date,timedelta,datetime\nfrom workalendar.europe import UnitedKingdom\n\ncal = UnitedKingdom()\nprint(cal.holidays(2020))\n\ndef workingDate(start,end):\n cal = UnitedKingdom()\n res = []\n delta = end - start\n for i in range(delta.days +1):\n day = start + timedelta(days=i)\n if cal.is_working_day(day) or day.weekday() < 5:\n res.append(day)\n else:\n pass\n return res\n\nstart = datetime.today()\nend = datetime(2020, 12, 23)\nr = workingDate(start,end)\nfor d in r:\n print(d.strftime('%d-%B-%Y'))\n print('\\n'*3)\n",
"from datetime import date, timedelta, datetime\nfrom workalendar.europe import UnitedKingdom\ncal = UnitedKingdom()\nprint(cal.holidays(2020))\n\n\ndef workingDate(start, end):\n cal = UnitedKingdom()\n res = []\n delta = end - start\n for i in range(delta.days + 1):\n day = start + timedelta(days=i)\n if cal.is_working_day(day) or day.weekday() < 5:\n res.append(day)\n else:\n pass\n return res\n\n\nstart = datetime.today()\nend = datetime(2020, 12, 23)\nr = workingDate(start, end)\nfor d in r:\n print(d.strftime('%d-%B-%Y'))\n print('\\n' * 3)\n",
"<import token>\ncal = UnitedKingdom()\nprint(cal.holidays(2020))\n\n\ndef workingDate(start, end):\n cal = UnitedKingdom()\n res = []\n delta = end - start\n for i in range(delta.days + 1):\n day = start + timedelta(days=i)\n if cal.is_working_day(day) or day.weekday() < 5:\n res.append(day)\n else:\n pass\n return res\n\n\nstart = datetime.today()\nend = datetime(2020, 12, 23)\nr = workingDate(start, end)\nfor d in r:\n print(d.strftime('%d-%B-%Y'))\n print('\\n' * 3)\n",
"<import token>\n<assignment token>\nprint(cal.holidays(2020))\n\n\ndef workingDate(start, end):\n cal = UnitedKingdom()\n res = []\n delta = end - start\n for i in range(delta.days + 1):\n day = start + timedelta(days=i)\n if cal.is_working_day(day) or day.weekday() < 5:\n res.append(day)\n else:\n pass\n return res\n\n\n<assignment token>\nfor d in r:\n print(d.strftime('%d-%B-%Y'))\n print('\\n' * 3)\n",
"<import token>\n<assignment token>\n<code token>\n\n\ndef workingDate(start, end):\n cal = UnitedKingdom()\n res = []\n delta = end - start\n for i in range(delta.days + 1):\n day = start + timedelta(days=i)\n if cal.is_working_day(day) or day.weekday() < 5:\n res.append(day)\n else:\n pass\n return res\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
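
In record 886 the filter `if cal.is_working_day(day) or day.weekday() < 5` counts every weekday as a working day, even when it falls on a UK bank holiday, because the second clause short-circuits the calendar check. workalendar's is_working_day already treats weekends and public holidays as non-working, so a stricter variant only needs the calendar; a minimal sketch under that assumption (not a change to the stored record):

from datetime import date, timedelta
from workalendar.europe import UnitedKingdom

def working_days(start, end):
    # is_working_day() returns False for weekends and UK public holidays,
    # so no extra weekday() test is needed.
    cal = UnitedKingdom()
    return [start + timedelta(days=i)
            for i in range((end - start).days + 1)
            if cal.is_working_day(start + timedelta(days=i))]

for d in working_days(date(2020, 12, 23), date(2020, 12, 29)):
    print(d.strftime('%d-%B-%Y'))  # the weekend and the Christmas bank holidays drop out
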
887 |
662fc9d64b9046180cf70ce4b26ac2b9665dba0e
|
# -*- coding=UTF-8 -*-

'''
Created on 20180127

@author: Harry
'''

import datetime

# today = datetime.date.today()
# weekday = today.weekday()
#
# if weekday == 0:
#     print "周一"
# else:
#     print "other days"

nowtime=datetime.datetime.now()
detaday = datetime.timedelta(days=-1)
da_days= nowtime + detaday

print da_days.strftime('%Y-%m-%d')
|
[
"# -*- coding=UTF-8 -*-\n\n'''\nCreated on 20180127\n\n@author: Harry\n'''\n\nimport datetime\n \n# today = datetime.date.today() \n# weekday = today.weekday() \n# \n# if weekday == 0:\n# print \"周一\"\n# else:\n# print \"other days\"\n\nnowtime=datetime.datetime.now() \ndetaday = datetime.timedelta(days=-1)\nda_days= nowtime + detaday\n\nprint da_days.strftime('%Y-%m-%d')\n\n \n "
] | true |
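
Record 887 carries error = true and keeps only a single step, most likely because the final line uses the Python 2 print statement and therefore does not parse under a Python 3 grammar. For reference, a Python 3 rendering of the same snippet would be (a sketch; the stored blob is left in its original form):

import datetime

nowtime = datetime.datetime.now()
detaday = datetime.timedelta(days=-1)   # step one day back
da_days = nowtime + detaday
print(da_days.strftime('%Y-%m-%d'))     # yesterday's date, e.g. 2018-01-26
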
888 |
ccee0e3c47fd3809e0670be24aaa6fd0a9bad3bc
|
# -*- coding: utf-8 -*-


class Library(object):

    def __init__(self, backend):
        self._backend = backend

    @property
    def cache(self):
        return self._backend.cache

    def cache_key(self, key):
        return self._backend.cache_key(key)

    def get_url(self, track):
        raise NotImplementedError()
|
[
"# -*- coding: utf-8 -*-\n\n\nclass Library(object):\n\n def __init__(self, backend):\n self._backend = backend\n\n @property\n def cache(self):\n return self._backend.cache\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n\n def get_url(self, track):\n raise NotImplementedError()\n",
"class Library(object):\n\n def __init__(self, backend):\n self._backend = backend\n\n @property\n def cache(self):\n return self._backend.cache\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n\n def get_url(self, track):\n raise NotImplementedError()\n",
"class Library(object):\n\n def __init__(self, backend):\n self._backend = backend\n <function token>\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n\n def get_url(self, track):\n raise NotImplementedError()\n",
"class Library(object):\n\n def __init__(self, backend):\n self._backend = backend\n <function token>\n <function token>\n\n def get_url(self, track):\n raise NotImplementedError()\n",
"class Library(object):\n <function token>\n <function token>\n <function token>\n\n def get_url(self, track):\n raise NotImplementedError()\n",
"class Library(object):\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<class token>\n"
] | false |
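
Library in record 888 is a small abstract base: cache and cache_key delegate to the wrapped backend, while get_url is deliberately left to subclasses. A minimal sketch of a concrete subclass, assuming the Library class above is in scope (FileLibrary, _DemoBackend and the file:// layout are illustrative names, not part of the record):

class _DemoBackend(object):
    # Hypothetical in-memory backend, only here to exercise the base class.
    cache = {}

    def cache_key(self, key):
        return 'demo:' + key


class FileLibrary(Library):
    # Resolves a track to a local file URL instead of raising NotImplementedError.
    def get_url(self, track):
        return 'file:///music/' + track


lib = FileLibrary(_DemoBackend())
print(lib.cache_key('song.mp3'))  # -> demo:song.mp3
print(lib.get_url('song.mp3'))    # -> file:///music/song.mp3
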
889 |
f799fdfde537bbe8f6c49a5e1a15cf6f910a0d45
|
#!/usr/bin/python3
"""Unittest for max_integer([..])
"""
import unittest
max_integer = __import__('6-max_integer').max_integer


class TestMaxInteger(unittest.TestCase):
    """ Interactive tests """
    def test_max(self):
        """Tests max_integer"""
        self.assertEqual(max_integer([1, 2, 3]), 3)
        self.assertEqual(max_integer([6, 2, 6]), 6)
        self.assertEqual(max_integer([0, 0, 0]), 0)
        self.assertEqual(max_integer([1, 5, 3]), 5)
        self.assertEqual(max_integer([1, 2, -3]), 2)
        self.assertEqual(max_integer([-1, -2, -3]), -1)
        self.assertEqual(max_integer([2]), 2)
        self.assertEqual(max_integer([]), None)

if __name__ == '__main__':
    unittest.main()
|
[
"#!/usr/bin/python3\n\"\"\"Unittest for max_integer([..])\n\"\"\"\nimport unittest\nmax_integer = __import__('6-max_integer').max_integer\n\n\nclass TestMaxInteger(unittest.TestCase):\n \"\"\" Interactive tests \"\"\"\n def test_max(self):\n \"\"\"Tests max_integer\"\"\"\n self.assertEqual(max_integer([1, 2, 3]), 3)\n self.assertEqual(max_integer([6, 2, 6]), 6)\n self.assertEqual(max_integer([0, 0, 0]), 0)\n self.assertEqual(max_integer([1, 5, 3]), 5)\n self.assertEqual(max_integer([1, 2, -3]), 2)\n self.assertEqual(max_integer([-1, -2, -3]), -1)\n self.assertEqual(max_integer([2]), 2)\n self.assertEqual(max_integer([]), None)\n\nif __name__ == '__main__':\n unittest.main()\n",
"<docstring token>\nimport unittest\nmax_integer = __import__('6-max_integer').max_integer\n\n\nclass TestMaxInteger(unittest.TestCase):\n \"\"\" Interactive tests \"\"\"\n\n def test_max(self):\n \"\"\"Tests max_integer\"\"\"\n self.assertEqual(max_integer([1, 2, 3]), 3)\n self.assertEqual(max_integer([6, 2, 6]), 6)\n self.assertEqual(max_integer([0, 0, 0]), 0)\n self.assertEqual(max_integer([1, 5, 3]), 5)\n self.assertEqual(max_integer([1, 2, -3]), 2)\n self.assertEqual(max_integer([-1, -2, -3]), -1)\n self.assertEqual(max_integer([2]), 2)\n self.assertEqual(max_integer([]), None)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"<docstring token>\n<import token>\nmax_integer = __import__('6-max_integer').max_integer\n\n\nclass TestMaxInteger(unittest.TestCase):\n \"\"\" Interactive tests \"\"\"\n\n def test_max(self):\n \"\"\"Tests max_integer\"\"\"\n self.assertEqual(max_integer([1, 2, 3]), 3)\n self.assertEqual(max_integer([6, 2, 6]), 6)\n self.assertEqual(max_integer([0, 0, 0]), 0)\n self.assertEqual(max_integer([1, 5, 3]), 5)\n self.assertEqual(max_integer([1, 2, -3]), 2)\n self.assertEqual(max_integer([-1, -2, -3]), -1)\n self.assertEqual(max_integer([2]), 2)\n self.assertEqual(max_integer([]), None)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass TestMaxInteger(unittest.TestCase):\n \"\"\" Interactive tests \"\"\"\n\n def test_max(self):\n \"\"\"Tests max_integer\"\"\"\n self.assertEqual(max_integer([1, 2, 3]), 3)\n self.assertEqual(max_integer([6, 2, 6]), 6)\n self.assertEqual(max_integer([0, 0, 0]), 0)\n self.assertEqual(max_integer([1, 5, 3]), 5)\n self.assertEqual(max_integer([1, 2, -3]), 2)\n self.assertEqual(max_integer([-1, -2, -3]), -1)\n self.assertEqual(max_integer([2]), 2)\n self.assertEqual(max_integer([]), None)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass TestMaxInteger(unittest.TestCase):\n \"\"\" Interactive tests \"\"\"\n\n def test_max(self):\n \"\"\"Tests max_integer\"\"\"\n self.assertEqual(max_integer([1, 2, 3]), 3)\n self.assertEqual(max_integer([6, 2, 6]), 6)\n self.assertEqual(max_integer([0, 0, 0]), 0)\n self.assertEqual(max_integer([1, 5, 3]), 5)\n self.assertEqual(max_integer([1, 2, -3]), 2)\n self.assertEqual(max_integer([-1, -2, -3]), -1)\n self.assertEqual(max_integer([2]), 2)\n self.assertEqual(max_integer([]), None)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass TestMaxInteger(unittest.TestCase):\n <docstring token>\n\n def test_max(self):\n \"\"\"Tests max_integer\"\"\"\n self.assertEqual(max_integer([1, 2, 3]), 3)\n self.assertEqual(max_integer([6, 2, 6]), 6)\n self.assertEqual(max_integer([0, 0, 0]), 0)\n self.assertEqual(max_integer([1, 5, 3]), 5)\n self.assertEqual(max_integer([1, 2, -3]), 2)\n self.assertEqual(max_integer([-1, -2, -3]), -1)\n self.assertEqual(max_integer([2]), 2)\n self.assertEqual(max_integer([]), None)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass TestMaxInteger(unittest.TestCase):\n <docstring token>\n <function token>\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n<code token>\n"
] | false |
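
The test case in record 889 imports max_integer from a 6-max_integer module that is not part of the record. One implementation that satisfies every assertion above looks like this (a sketch of the missing module, not the original file):

#!/usr/bin/python3
"""Sketch of the 6-max_integer module the tests import."""


def max_integer(values=None):
    """Return the largest element of values, or None for an empty list."""
    if not values:
        return None
    result = values[0]
    for v in values[1:]:
        if v > result:
            result = v
    return result
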
890 |
edd70f55e76418911d304d6eb41a6d2a93005a58
|
from api import url, key, opposite
import requests
import json
import time
import os
from miner import mine
from cpu import *
class Player:
def __init__(self):
data = self._get_status()
time.sleep(data['cooldown'])
self.name = data['name']
self.cooldown = data['cooldown']
self.encumbrance = data['encumbrance']
self.strength = data['strength']
self.speed = data['speed']
self.gold = data['gold']
self.bodywear = data['bodywear']
self.footwear = data['footwear']
self.inventory = data['inventory']
self.abilities = data['abilities']
self.status = data['status']
self.has_mined = data['has_mined']
self.errors = data['errors']
self.messages = data['messages']
self.snitches = data['snitches'] if data['snitches'] else 0
self.current_room = self.check_room()
self.world = "dark" if self.current_room['room_id'] > 499 else "light"
self.map = self._read_file('map.txt')
self.graph = self._read_file('graph.txt')
def _get_status(self):
r = requests.post(f"{url}/api/adv/status/",
headers={'Authorization': f"Token {key}", "Content-Type": "application/json"})
return r.json()
def _read_file(self, filepath):
if self.world == 'dark':
filepath = 'dark_' + filepath
if not os.path.exists(filepath):
f = open(filepath, 'w+')
room = self.current_room
if 'graph' in filepath:
room = {room['room_id']: {d: '?' for d in room['exits']}}
self._write_file(filepath, {self.current_room['room_id']: room})
with open(filepath, 'r') as f:
data = json.load(f)
return data
def _write_file(self, filepath, data):
if self.world == 'dark' and 'dark' not in filepath:
filepath = 'dark_' + filepath
with open(filepath, 'w+') as outfile:
json.dump(data, outfile)
def check_room(self):
r = requests.get(f"{url}/api/adv/init/",
headers={'Authorization': f"Token {key}"})
data = r.json()
if 'players' in data:
del data['players']
return data
def check_self(self, cause=None):
data = self._get_status()
cleaned = {**data} # How cool is the spread operator!
cleaned['status'].append("Glasowyn's hands stand Empty and Effervescent, see them filled.") if len(
cleaned['status']) < 1 else None
cleaned["world"] = self.world
cut = ['has_mined', 'errors', ]
for k in cut:
del cleaned[k]
if cause == "item pick up":
ret = f" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}"
print(ret + f"\n Your ghost seems to have the space to carry an additional item if you would like" if "carry" in cleaned['abilities'] and len(
cleaned['status']) else ret)
else:
print('\n'+"*"*22+' '+"Your Current State"+' '+"*"*22)
for item in cleaned.items():
print(f"{item[0]}: {item[1]}")
print("*"*64+'\n')
self.name = data['name']
self.cooldown = data['cooldown']
self.encumbrance = data['encumbrance']
self.strength = data['strength']
self.speed = data['speed']
self.gold = data['gold']
self.bodywear = data['bodywear']
self.footwear = data['footwear']
self.inventory = data['inventory']
self.abilities = data['abilities']
self.status = data['status']
self.has_mined = data['has_mined']
self.errors = data['errors']
self.messages = data['messages']
self.snitches = data['snitches'] if data['snitches'] else 0
self.map = self._read_file('map.txt')
self.graph = self._read_file('graph.txt')
def dash(self, direction, num_rooms, room_ids):
if "dash" not in self.abilities:
print("Error! You can't dash yet!")
return
time.sleep(self.cooldown)
curr_id = self.current_room['room_id']
print("\n======================================")
print(f"Dashing {direction} from room {curr_id}...")
json = {"direction": direction,
"num_rooms": num_rooms, "next_room_ids": room_ids}
r = requests.post(f"{url}/api/adv/dash/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json)
next_room = r.json()
if 'players' in next_room:
del next_room['players']
next_id = next_room['room_id']
# update map with room info
self.map[next_id] = next_room
self._write_file('map.txt', self.map)
# change current room and update cooldown
self.current_room = next_room
self.cooldown = self.current_room['cooldown']
if self.world == 'dark' and 'golden snitch' in next_room['items']:
try:
self.pick_up_loot('golden snitch')
except:
print("Somebody already got that snitch!")
elif self.world == 'light' and len(next_room['items']):
for item in next_room['items']:
self.pick_up_loot(item)
for message in next_room['messages']:
print(f"{message}")
print(f"Now the player is in {self.current_room['room_id']}")
print(f"Cooldown before next action: {self.cooldown} seconds")
print("======================================\n")
def travel(self, direction, method="move"):
time.sleep(self.cooldown)
curr_id = self.current_room['room_id']
print("\n======================================")
if "fly" in self.abilities and self.map[str(curr_id)]['terrain'] in ['MOUNTAIN', 'NORMAL']:
method = "fly"
print(f"Flying {direction} from room {curr_id}...")
else:
print(f"Walking {direction} from room {curr_id}...")
if direction not in self.graph[str(curr_id)]:
print("Error! Not a valid direction from the current room")
else:
json = {"direction": direction}
if self.graph[str(curr_id)][direction] != "?":
json['next_room_id'] = str(self.graph[str(curr_id)][direction])
next_room = requests.post(f"{url}/api/adv/{method}/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
# change current room and update cooldown
self.current_room = next_room
self.cooldown = self.current_room['cooldown']
if self.world != 'dark':
# Code for looting any items in the room if the space is available
if len(next_room['items']) > 0 and self.encumbrance < self.strength:
for item in next_room['items']:
time.sleep(next_room['cooldown'])
self.pick_up_loot(item)
else:
if 'golden snitch' in next_room['items']:
self.pick_up_loot('golden snitch')
if 'players' in next_room:
del next_room['players']
next_id = next_room['room_id']
# add to graph and map, in addition to making graph connections
if str(next_id) not in self.graph:
print(f"New room! # {next_id}")
self.graph[str(next_id)] = {
e: '?' for e in next_room['exits']}
# make graph connections and update graph
self.graph[str(curr_id)][direction] = next_id
self.graph[str(next_id)][opposite[direction]] = curr_id
self._write_file('graph.txt', self.graph)
# update map with room info
self.map[next_id] = next_room
self._write_file('map.txt', self.map)
for message in next_room['messages']:
print(f"{message}")
print(f"Now the player is in {self.current_room['room_id']}")
print(f"Cooldown before next action: {self.cooldown} seconds")
if len(self.graph) < 500:
print(
f"Total number of rooms explored so far: {len(self.graph)}")
print("======================================\n")
def get_coin(self):
time.sleep(self.cooldown)
data = mine()
self.cooldown = data['cooldown']
if len(data['errors']) > 0:
self.get_coin()
def pick_up_loot(self, item):
print(f"Looting {item}")
json = {"name": item}
if self.encumbrance < self.strength:
time.sleep(self.cooldown)
req = requests.post(f"{url}/api/adv/take/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
self.cooldown = req['cooldown']
time.sleep(self.cooldown)
self.check_self("item pick up") if self.world == 'light' else print(' Success!\n '+req['messages'][0] if len(req['messages']) > 0 else print(
" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!"))
else:
if "carry" in self.abilities:
if len(self.status) != 0:
print(
"It seems your Bag is full and Glasowyn is already carring something!")
else:
req = requests.post(f"{url}/api/adv/carry/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
self.cooldown = req['cooldown']
print(req)
else:
print("Your Bag is full!")
def drop_loot(self, item):
time.sleep(self.cooldown)
json = {"name": item}
req = requests.post(f"{url}/api/adv/drop/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
time.sleep(req['cooldown'])
self.check_self()
def buy_name(self, name):
time.sleep(self.cooldown)
json = {"name": name}
req = requests.post(f"{url}/api/adv/change_name/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
print(req)
time.sleep(req['cooldown'])
json['confirm'] = "aye"
r1_conf = requests.post(f"{url}/api/adv/change_name/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
print(r1_conf)
time.sleep(r1_conf['cooldown'])
self.check_self()
def examine(self, item):
time.sleep(self.cooldown)
json = {"name": item}
req = requests.post(f"{url}/api/adv/examine/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
self.cooldown = req['cooldown']
if item == "WELL": # Examining well gives binary code to be deciphered for next coin location
if os.path.exists("hint.txt"):
os.remove("hint.txt")
desc = req['description']
instructions = desc.split('\n')
for line in instructions[2:]:
with open("hint.txt", "a") as f:
f.write(f"{line}\n")
cpu = CPU()
cpu.load('hint.txt')
cpu.run()
# clean up after itself and remove the hint file after used (new one will be made for future hints anyway)
if os.path.exists("hint.txt"):
os.remove("hint.txt")
# full message for light is "Mine your coin in room ###"
# but message for dark well is "Find your snitch in room ###"
limiter = 23 if self.world == 'light' else 24
return cpu.hint[limiter:]
else:
print(req['description'])
def pray(self):
time.sleep(self.cooldown)
req = requests.post(f"{url}/api/adv/pray/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}).json()
print(req)
time.sleep(req['cooldown'])
self.check_self()
def wear(self, item):
time.sleep(self.cooldown)
json = {"name": item}
req = requests.post(f"{url}/api/adv/wear/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
self.cooldown = req['cooldown']
time.sleep(self.cooldown)
self.check_self()
def check_balance(self):
time.sleep(self.cooldown)
req = requests.get(f"{url}/api/bc/get_balance/", headers={
'Authorization': f"Token {key}"}).json()
self.coins = float(req['messages'][0].split(' ')[5])
self.cooldown = req['cooldown']
print(f"\n{req['messages'][0]}\n")
def transform_coin(self, item):
time.sleep(self.cooldown)
self.check_balance()
json = {"name": item}
if self.coins > 0 and item in self.inventory:
time.sleep(self.cooldown)
req = requests.post(f"{url}/api/adv/transmogrify/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}, json=json).json()
print(req)
self.cooldown = req['cooldown']
for item in req['items']:
self.pick_up_loot(item)
def warp(self):
if "warp" in self.abilities:
time.sleep(self.cooldown)
req = requests.post(f"{url}/api/adv/warp/", headers={
'Authorization': f"Token {key}", "Content-Type": "application/json"}).json()
print(req['messages'][0])
self.cooldown = req['cooldown']
if self.world == 'light':
self.world = 'dark'
else:
self.world = 'light'
self.current_room = req
time.sleep(self.cooldown)
self.check_self()
if req['room_id'] not in self.graph:
# Just warped to a previously unknown room, add it to graph and map
g = self.graph
g[req['room_id']] = {d: '?' for d in req['exits']}
self._write_file('graph.txt', g)
m = self.map
m[req['room_id']] = req
self._write_file('map.txt', m)
else:
print("You do not have the warp ability yet!")
|
[
"from api import url, key, opposite\nimport requests\nimport json\nimport time\nimport os\nfrom miner import mine\nfrom cpu import *\n\n\nclass Player:\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = \"dark\" if self.current_room['room_id'] > 499 else \"light\"\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f\"{url}/api/adv/status/\",\n headers={'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n\n self._write_file(filepath, {self.current_room['room_id']: room})\n\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n\n def _write_file(self, filepath, data):\n if self.world == 'dark' and 'dark' not in filepath:\n filepath = 'dark_' + filepath\n with open(filepath, 'w+') as outfile:\n json.dump(data, outfile)\n\n def check_room(self):\n r = requests.get(f\"{url}/api/adv/init/\",\n headers={'Authorization': f\"Token {key}\"})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data} # How cool is the spread operator!\n cleaned['status'].append(\"Glasowyn's hands stand Empty and Effervescent, see them filled.\") if len(\n cleaned['status']) < 1 else None\n cleaned[\"world\"] = self.world\n cut = ['has_mined', 'errors', ]\n for k in cut:\n del cleaned[k]\n if cause == \"item pick up\":\n ret = f\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\\n Your Experience and equipment Grant you the ability to\\n carry {cleaned['strength']} stones before you need to take longer rests.\\n Your bag now carries {cleaned['inventory']}\"\n\n print(ret + f\"\\n Your ghost seems to have the space to carry an additional item if you would like\" if \"carry\" in cleaned['abilities'] and len(\n cleaned['status']) else ret)\n else:\n print('\\n'+\"*\"*22+' '+\"Your Current State\"+' '+\"*\"*22)\n for item in cleaned.items():\n print(f\"{item[0]}: {item[1]}\")\n print(\"*\"*64+'\\n')\n\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = 
self._read_file('graph.txt')\n\n def dash(self, direction, num_rooms, room_ids):\n if \"dash\" not in self.abilities:\n print(\"Error! You can't dash yet!\")\n return\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print(\"\\n======================================\")\n print(f\"Dashing {direction} from room {curr_id}...\")\n\n json = {\"direction\": direction,\n \"num_rooms\": num_rooms, \"next_room_ids\": room_ids}\n r = requests.post(f\"{url}/api/adv/dash/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json)\n next_room = r.json()\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n\n # update map with room info\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n\n # change current room and update cooldown\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n\n if self.world == 'dark' and 'golden snitch' in next_room['items']:\n try:\n self.pick_up_loot('golden snitch')\n except:\n print(\"Somebody already got that snitch!\")\n elif self.world == 'light' and len(next_room['items']):\n for item in next_room['items']:\n self.pick_up_loot(item)\n\n for message in next_room['messages']:\n print(f\"{message}\")\n\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f\"Cooldown before next action: {self.cooldown} seconds\")\n print(\"======================================\\n\")\n\n def travel(self, direction, method=\"move\"):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n\n print(\"\\n======================================\")\n if \"fly\" in self.abilities and self.map[str(curr_id)]['terrain'] in ['MOUNTAIN', 'NORMAL']:\n method = \"fly\"\n print(f\"Flying {direction} from room {curr_id}...\")\n else:\n print(f\"Walking {direction} from room {curr_id}...\")\n\n if direction not in self.graph[str(curr_id)]:\n print(\"Error! Not a valid direction from the current room\")\n else:\n json = {\"direction\": direction}\n if self.graph[str(curr_id)][direction] != \"?\":\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f\"{url}/api/adv/{method}/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n\n # change current room and update cooldown\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n\n if self.world != 'dark':\n # Code for looting any items in the room if the space is available\n if len(next_room['items']) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n else:\n if 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n\n # add to graph and map, in addition to making graph connections\n if str(next_id) not in self.graph:\n print(f\"New room! # {next_id}\")\n self.graph[str(next_id)] = {\n e: '?' 
for e in next_room['exits']}\n\n # make graph connections and update graph\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n\n # update map with room info\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n\n for message in next_room['messages']:\n print(f\"{message}\")\n\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f\"Cooldown before next action: {self.cooldown} seconds\")\n if len(self.graph) < 500:\n print(\n f\"Total number of rooms explored so far: {len(self.graph)}\")\n print(\"======================================\\n\")\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f\"Looting {item}\")\n json = {\"name\": item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f\"{url}/api/adv/take/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self(\"item pick up\") if self.world == 'light' else print(' Success!\\n '+req['messages'][0] if len(req['messages']) > 0 else print(\n \" Oh NO!\\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"))\n else:\n if \"carry\" in self.abilities:\n if len(self.status) != 0:\n print(\n \"It seems your Bag is full and Glasowyn is already carring something!\")\n else:\n req = requests.post(f\"{url}/api/adv/carry/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print(\"Your Bag is full!\")\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {\"name\": item}\n req = requests.post(f\"{url}/api/adv/drop/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n\n def buy_name(self, name):\n time.sleep(self.cooldown)\n json = {\"name\": name}\n req = requests.post(f\"{url}/api/adv/change_name/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n print(req)\n\n time.sleep(req['cooldown'])\n\n json['confirm'] = \"aye\"\n r1_conf = requests.post(f\"{url}/api/adv/change_name/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n print(r1_conf)\n time.sleep(r1_conf['cooldown'])\n self.check_self()\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {\"name\": item}\n req = requests.post(f\"{url}/api/adv/examine/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n self.cooldown = req['cooldown']\n\n if item == \"WELL\": # Examining well gives binary code to be deciphered for next coin location\n if os.path.exists(\"hint.txt\"):\n os.remove(\"hint.txt\")\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open(\"hint.txt\", \"a\") as f:\n f.write(f\"{line}\\n\")\n\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n # clean up after itself and remove the hint file after used (new one will be made for future hints anyway)\n if os.path.exists(\"hint.txt\"):\n os.remove(\"hint.txt\")\n # full message 
for light is \"Mine your coin in room ###\"\n # but message for dark well is \"Find your snitch in room ###\"\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f\"{url}/api/adv/pray/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n\n def wear(self, item):\n time.sleep(self.cooldown)\n json = {\"name\": item}\n req = requests.post(f\"{url}/api/adv/wear/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self()\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f\"{url}/api/bc/get_balance/\", headers={\n 'Authorization': f\"Token {key}\"}).json()\n self.coins = float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {\"name\": item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f\"{url}/api/adv/transmogrify/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n\n def warp(self):\n if \"warp\" in self.abilities:\n time.sleep(self.cooldown)\n req = requests.post(f\"{url}/api/adv/warp/\", headers={\n 'Authorization': f\"Token {key}\", \"Content-Type\": \"application/json\"}).json()\n print(req['messages'][0])\n self.cooldown = req['cooldown']\n if self.world == 'light':\n self.world = 'dark'\n else:\n self.world = 'light'\n self.current_room = req\n time.sleep(self.cooldown)\n self.check_self()\n\n if req['room_id'] not in self.graph:\n # Just warped to a previously unknown room, add it to graph and map\n g = self.graph\n g[req['room_id']] = {d: '?' for d in req['exits']}\n self._write_file('graph.txt', g)\n\n m = self.map\n m[req['room_id']] = req\n self._write_file('map.txt', m)\n else:\n print(\"You do not have the warp ability yet!\")\n",
"from api import url, key, opposite\nimport requests\nimport json\nimport time\nimport os\nfrom miner import mine\nfrom cpu import *\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n\n def _write_file(self, filepath, data):\n if self.world == 'dark' and 'dark' not in filepath:\n filepath = 'dark_' + filepath\n with open(filepath, 'w+') as outfile:\n json.dump(data, outfile)\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def 
dash(self, direction, num_rooms, room_ids):\n if 'dash' not in self.abilities:\n print(\"Error! You can't dash yet!\")\n return\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n print(f'Dashing {direction} from room {curr_id}...')\n json = {'direction': direction, 'num_rooms': num_rooms,\n 'next_room_ids': room_ids}\n r = requests.post(f'{url}/api/adv/dash/', headers={'Authorization':\n f'Token {key}', 'Content-Type': 'application/json'}, json=json)\n next_room = r.json()\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world == 'dark' and 'golden snitch' in next_room['items']:\n try:\n self.pick_up_loot('golden snitch')\n except:\n print('Somebody already got that snitch!')\n elif self.world == 'light' and len(next_room['items']):\n for item in next_room['items']:\n self.pick_up_loot(item)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n print('======================================\\n')\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' 
for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n\n def buy_name(self, name):\n time.sleep(self.cooldown)\n json = {'name': name}\n req = requests.post(f'{url}/api/adv/change_name/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n time.sleep(req['cooldown'])\n json['confirm'] = 'aye'\n r1_conf = requests.post(f'{url}/api/adv/change_name/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(r1_conf)\n time.sleep(r1_conf['cooldown'])\n self.check_self()\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n 
self.check_self()\n\n def wear(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/wear/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self()\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f'{url}/api/bc/get_balance/', headers={\n 'Authorization': f'Token {key}'}).json()\n self.coins = float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n\n def warp(self):\n if 'warp' in self.abilities:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/warp/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req['messages'][0])\n self.cooldown = req['cooldown']\n if self.world == 'light':\n self.world = 'dark'\n else:\n self.world = 'light'\n self.current_room = req\n time.sleep(self.cooldown)\n self.check_self()\n if req['room_id'] not in self.graph:\n g = self.graph\n g[req['room_id']] = {d: '?' for d in req['exits']}\n self._write_file('graph.txt', g)\n m = self.map\n m[req['room_id']] = req\n self._write_file('map.txt', m)\n else:\n print('You do not have the warp ability yet!')\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n\n def _write_file(self, filepath, data):\n if self.world == 'dark' and 'dark' not in filepath:\n filepath = 'dark_' + filepath\n with open(filepath, 'w+') as outfile:\n json.dump(data, outfile)\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def dash(self, direction, num_rooms, room_ids):\n if 'dash' not in self.abilities:\n print(\"Error! 
You can't dash yet!\")\n return\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n print(f'Dashing {direction} from room {curr_id}...')\n json = {'direction': direction, 'num_rooms': num_rooms,\n 'next_room_ids': room_ids}\n r = requests.post(f'{url}/api/adv/dash/', headers={'Authorization':\n f'Token {key}', 'Content-Type': 'application/json'}, json=json)\n next_room = r.json()\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world == 'dark' and 'golden snitch' in next_room['items']:\n try:\n self.pick_up_loot('golden snitch')\n except:\n print('Somebody already got that snitch!')\n elif self.world == 'light' and len(next_room['items']):\n for item in next_room['items']:\n self.pick_up_loot(item)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n print('======================================\\n')\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' 
for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n\n def buy_name(self, name):\n time.sleep(self.cooldown)\n json = {'name': name}\n req = requests.post(f'{url}/api/adv/change_name/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n time.sleep(req['cooldown'])\n json['confirm'] = 'aye'\n r1_conf = requests.post(f'{url}/api/adv/change_name/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(r1_conf)\n time.sleep(r1_conf['cooldown'])\n self.check_self()\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n 
self.check_self()\n\n def wear(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/wear/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self()\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f'{url}/api/bc/get_balance/', headers={\n 'Authorization': f'Token {key}'}).json()\n self.coins = float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n\n def warp(self):\n if 'warp' in self.abilities:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/warp/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req['messages'][0])\n self.cooldown = req['cooldown']\n if self.world == 'light':\n self.world = 'dark'\n else:\n self.world = 'light'\n self.current_room = req\n time.sleep(self.cooldown)\n self.check_self()\n if req['room_id'] not in self.graph:\n g = self.graph\n g[req['room_id']] = {d: '?' for d in req['exits']}\n self._write_file('graph.txt', g)\n m = self.map\n m[req['room_id']] = req\n self._write_file('map.txt', m)\n else:\n print('You do not have the warp ability yet!')\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n\n def _write_file(self, filepath, data):\n if self.world == 'dark' and 'dark' not in filepath:\n filepath = 'dark_' + filepath\n with open(filepath, 'w+') as outfile:\n json.dump(data, outfile)\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def dash(self, direction, num_rooms, room_ids):\n if 'dash' not in self.abilities:\n print(\"Error! 
You can't dash yet!\")\n return\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n print(f'Dashing {direction} from room {curr_id}...')\n json = {'direction': direction, 'num_rooms': num_rooms,\n 'next_room_ids': room_ids}\n r = requests.post(f'{url}/api/adv/dash/', headers={'Authorization':\n f'Token {key}', 'Content-Type': 'application/json'}, json=json)\n next_room = r.json()\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world == 'dark' and 'golden snitch' in next_room['items']:\n try:\n self.pick_up_loot('golden snitch')\n except:\n print('Somebody already got that snitch!')\n elif self.world == 'light' and len(next_room['items']):\n for item in next_room['items']:\n self.pick_up_loot(item)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n print('======================================\\n')\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' 
for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n\n def wear(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/wear/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self()\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f'{url}/api/bc/get_balance/', headers={\n 'Authorization': f'Token {key}'}).json()\n self.coins = 
float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n\n def warp(self):\n if 'warp' in self.abilities:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/warp/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req['messages'][0])\n self.cooldown = req['cooldown']\n if self.world == 'light':\n self.world = 'dark'\n else:\n self.world = 'light'\n self.current_room = req\n time.sleep(self.cooldown)\n self.check_self()\n if req['room_id'] not in self.graph:\n g = self.graph\n g[req['room_id']] = {d: '?' for d in req['exits']}\n self._write_file('graph.txt', g)\n m = self.map\n m[req['room_id']] = req\n self._write_file('map.txt', m)\n else:\n print('You do not have the warp ability yet!')\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n\n def _write_file(self, filepath, data):\n if self.world == 'dark' and 'dark' not in filepath:\n filepath = 'dark_' + filepath\n with open(filepath, 'w+') as outfile:\n json.dump(data, outfile)\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def dash(self, direction, num_rooms, room_ids):\n if 'dash' not in self.abilities:\n print(\"Error! 
You can't dash yet!\")\n return\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n print(f'Dashing {direction} from room {curr_id}...')\n json = {'direction': direction, 'num_rooms': num_rooms,\n 'next_room_ids': room_ids}\n r = requests.post(f'{url}/api/adv/dash/', headers={'Authorization':\n f'Token {key}', 'Content-Type': 'application/json'}, json=json)\n next_room = r.json()\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world == 'dark' and 'golden snitch' in next_room['items']:\n try:\n self.pick_up_loot('golden snitch')\n except:\n print('Somebody already got that snitch!')\n elif self.world == 'light' and len(next_room['items']):\n for item in next_room['items']:\n self.pick_up_loot(item)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n print('======================================\\n')\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' 
for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f'{url}/api/bc/get_balance/', headers={\n 'Authorization': f'Token {key}'}).json()\n self.coins = float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = 
requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n\n def warp(self):\n if 'warp' in self.abilities:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/warp/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req['messages'][0])\n self.cooldown = req['cooldown']\n if self.world == 'light':\n self.world = 'dark'\n else:\n self.world = 'light'\n self.current_room = req\n time.sleep(self.cooldown)\n self.check_self()\n if req['room_id'] not in self.graph:\n g = self.graph\n g[req['room_id']] = {d: '?' for d in req['exits']}\n self._write_file('graph.txt', g)\n m = self.map\n m[req['room_id']] = req\n self._write_file('map.txt', m)\n else:\n print('You do not have the warp ability yet!')\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n\n def _write_file(self, filepath, data):\n if self.world == 'dark' and 'dark' not in filepath:\n filepath = 'dark_' + filepath\n with open(filepath, 'w+') as outfile:\n json.dump(data, outfile)\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def dash(self, direction, num_rooms, room_ids):\n if 'dash' not in self.abilities:\n print(\"Error! 
You can't dash yet!\")\n return\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n print(f'Dashing {direction} from room {curr_id}...')\n json = {'direction': direction, 'num_rooms': num_rooms,\n 'next_room_ids': room_ids}\n r = requests.post(f'{url}/api/adv/dash/', headers={'Authorization':\n f'Token {key}', 'Content-Type': 'application/json'}, json=json)\n next_room = r.json()\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world == 'dark' and 'golden snitch' in next_room['items']:\n try:\n self.pick_up_loot('golden snitch')\n except:\n print('Somebody already got that snitch!')\n elif self.world == 'light' and len(next_room['items']):\n for item in next_room['items']:\n self.pick_up_loot(item)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n print('======================================\\n')\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' 
for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f'{url}/api/bc/get_balance/', headers={\n 'Authorization': f'Token {key}'}).json()\n self.coins = float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = 
requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def dash(self, direction, num_rooms, room_ids):\n if 'dash' not in self.abilities:\n print(\"Error! 
You can't dash yet!\")\n return\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n print(f'Dashing {direction} from room {curr_id}...')\n json = {'direction': direction, 'num_rooms': num_rooms,\n 'next_room_ids': room_ids}\n r = requests.post(f'{url}/api/adv/dash/', headers={'Authorization':\n f'Token {key}', 'Content-Type': 'application/json'}, json=json)\n next_room = r.json()\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world == 'dark' and 'golden snitch' in next_room['items']:\n try:\n self.pick_up_loot('golden snitch')\n except:\n print('Somebody already got that snitch!')\n elif self.world == 'light' and len(next_room['items']):\n for item in next_room['items']:\n self.pick_up_loot(item)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n print('======================================\\n')\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' 
for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f'{url}/api/bc/get_balance/', headers={\n 'Authorization': f'Token {key}'}).json()\n self.coins = float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = 
requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n 
print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n 
instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def check_balance(self):\n time.sleep(self.cooldown)\n req = requests.get(f'{url}/api/bc/get_balance/', headers={\n 'Authorization': f'Token {key}'}).json()\n self.coins = float(req['messages'][0].split(' ')[5])\n self.cooldown = req['cooldown']\n print(f\"\\n{req['messages'][0]}\\n\")\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n 
print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n\n def get_coin(self):\n time.sleep(self.cooldown)\n data = mine()\n self.cooldown = data['cooldown']\n if len(data['errors']) > 0:\n self.get_coin()\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n 
instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n\n def travel(self, direction, method='move'):\n time.sleep(self.cooldown)\n curr_id = self.current_room['room_id']\n print('\\n======================================')\n if 'fly' in self.abilities and self.map[str(curr_id)]['terrain'] in [\n 'MOUNTAIN', 'NORMAL']:\n method = 'fly'\n 
print(f'Flying {direction} from room {curr_id}...')\n else:\n print(f'Walking {direction} from room {curr_id}...')\n if direction not in self.graph[str(curr_id)]:\n print('Error! Not a valid direction from the current room')\n else:\n json = {'direction': direction}\n if self.graph[str(curr_id)][direction] != '?':\n json['next_room_id'] = str(self.graph[str(curr_id)][direction])\n next_room = requests.post(f'{url}/api/adv/{method}/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.current_room = next_room\n self.cooldown = self.current_room['cooldown']\n if self.world != 'dark':\n if len(next_room['items']\n ) > 0 and self.encumbrance < self.strength:\n for item in next_room['items']:\n time.sleep(next_room['cooldown'])\n self.pick_up_loot(item)\n elif 'golden snitch' in next_room['items']:\n self.pick_up_loot('golden snitch')\n if 'players' in next_room:\n del next_room['players']\n next_id = next_room['room_id']\n if str(next_id) not in self.graph:\n print(f'New room! # {next_id}')\n self.graph[str(next_id)] = {e: '?' for e in next_room['exits']}\n self.graph[str(curr_id)][direction] = next_id\n self.graph[str(next_id)][opposite[direction]] = curr_id\n self._write_file('graph.txt', self.graph)\n self.map[next_id] = next_room\n self._write_file('map.txt', self.map)\n for message in next_room['messages']:\n print(f'{message}')\n print(f\"Now the player is in {self.current_room['room_id']}\")\n print(f'Cooldown before next action: {self.cooldown} seconds')\n if len(self.graph) < 500:\n print(\n f'Total number of rooms explored so far: {len(self.graph)}'\n )\n print('======================================\\n')\n <function token>\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = 
CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 
'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n\n def transform_coin(self, item):\n time.sleep(self.cooldown)\n self.check_balance()\n json = {'name': item}\n if self.coins > 0 and item in self.inventory:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/transmogrify/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n print(req)\n self.cooldown = req['cooldown']\n for item in req['items']:\n self.pick_up_loot(item)\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n\n def pick_up_loot(self, item):\n print(f'Looting {item}')\n json = {'name': item}\n if self.encumbrance < self.strength:\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/take/', headers={\n 'Authorization': f'Token {key}', 
'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n time.sleep(self.cooldown)\n self.check_self('item pick up'\n ) if self.world == 'light' else print(' Success!\\n ' +\n req['messages'][0] if len(req['messages']) > 0 else print(\n \"\"\" Oh NO!\n just as quickly as you arrived, the Golden Snitch disappeared to the next room and out of grasp!\"\"\"\n ))\n elif 'carry' in self.abilities:\n if len(self.status) != 0:\n print(\n 'It seems your Bag is full and Glasowyn is already carring something!'\n )\n else:\n req = requests.post(f'{url}/api/adv/carry/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n print(req)\n else:\n print('Your Bag is full!')\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n\n def _read_file(self, filepath):\n if self.world == 'dark':\n filepath = 'dark_' + filepath\n if not os.path.exists(filepath):\n f = open(filepath, 'w+')\n room = self.current_room\n if 'graph' in filepath:\n room = {room['room_id']: {d: '?' for d in room['exits']}}\n self._write_file(filepath, {self.current_room['room_id']: room})\n with open(filepath, 'r') as f:\n data = json.load(f)\n return data\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, 
json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n\n def _get_status(self):\n r = requests.post(f'{url}/api/adv/status/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'})\n return r.json()\n <function token>\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if 
os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n\n def check_room(self):\n r = requests.get(f'{url}/api/adv/init/', headers={'Authorization':\n f'Token {key}'})\n data = r.json()\n if 'players' in data:\n del data['players']\n return data\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as 
f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n\n def examine(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/examine/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n self.cooldown = req['cooldown']\n if item == 'WELL':\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n desc = req['description']\n instructions = desc.split('\\n')\n for line in instructions[2:]:\n with open('hint.txt', 'a') as f:\n f.write(f'{line}\\n')\n cpu = CPU()\n cpu.load('hint.txt')\n cpu.run()\n if os.path.exists('hint.txt'):\n os.remove('hint.txt')\n limiter = 23 if self.world == 'light' 
else 24\n return cpu.hint[limiter:]\n else:\n print(req['description'])\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n\n def check_self(self, cause=None):\n data = self._get_status()\n cleaned = {**data}\n cleaned['status'].append(\n \"Glasowyn's hands stand Empty and Effervescent, see them filled.\"\n ) if len(cleaned['status']) < 1 else None\n cleaned['world'] = self.world\n cut = ['has_mined', 'errors']\n for k in cut:\n del cleaned[k]\n if cause == 'item pick up':\n ret = f\"\"\" You are now held down by the weight of {cleaned['encumbrance']} Stones.\n Your Experience and equipment Grant you the ability to\n carry {cleaned['strength']} stones before you need to take longer rests.\n Your bag now carries {cleaned['inventory']}\"\"\"\n print(ret +\n f\"\"\"\n Your ghost seems to have the space to carry an additional item if you would like\"\"\"\n if 'carry' in cleaned['abilities'] and len(cleaned[\n 'status']) else ret)\n else:\n print('\\n' + '*' * 22 + ' ' + 'Your Current State' + ' ' + '*' * 22\n )\n for item in cleaned.items():\n print(f'{item[0]}: {item[1]}')\n print('*' * 64 + '\\n')\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def drop_loot(self, item):\n time.sleep(self.cooldown)\n json = {'name': item}\n req = requests.post(f'{url}/api/adv/drop/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}, json=json).json()\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def pray(self):\n time.sleep(self.cooldown)\n req = requests.post(f'{url}/api/adv/pray/', headers={\n 'Authorization': f'Token {key}', 'Content-Type':\n 'application/json'}).json()\n print(req)\n time.sleep(req['cooldown'])\n self.check_self()\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n\n def __init__(self):\n data = self._get_status()\n time.sleep(data['cooldown'])\n self.name = data['name']\n self.cooldown = data['cooldown']\n self.encumbrance = data['encumbrance']\n self.strength = data['strength']\n self.speed = data['speed']\n self.gold = data['gold']\n self.bodywear = data['bodywear']\n self.footwear = data['footwear']\n self.inventory = data['inventory']\n self.abilities = data['abilities']\n self.status = data['status']\n self.has_mined = data['has_mined']\n self.errors = data['errors']\n self.messages = data['messages']\n self.snitches = data['snitches'] if data['snitches'] else 0\n self.current_room = self.check_room()\n self.world = 'dark' if self.current_room['room_id'] > 499 else 'light'\n self.map = self._read_file('map.txt')\n self.graph = self._read_file('graph.txt')\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Player:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
891 |
e14bea6376c8649bf9c9c5759d530af773664cd4
|
#!/usr/bin/env python3
import pandas as pd
import csv
def get_apriori_input(input_file,output_file,sample_col="Sample",gene_id_col="Gene_ID"):
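    # Writes one tab-separated row per sample: the sample name followed by a
    # comma-joined string of that sample's gene IDs (transaction-style input for Apriori).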
df=pd.read_csv(input_file,sep="\t")
sample_names=df[sample_col].unique()
with open(output_file,"w") as out:
csv_writer=csv.writer(out,delimiter="\t")
for sample_name in sample_names:
bool=df[sample_col]==sample_name
df_sample=df[bool]
gene_ids=df_sample[gene_id_col]
gene_string=",".join(gene_ids)
csv_writer.writerow([sample_name,gene_string])
if __name__ == "__main__":
import sys
program,input_file,output_file,sample_col,gene_id_col=sys.argv
get_apriori_input(input_file,output_file,sample_col,gene_id_col)
|
[
"#!/usr/bin/env python3\nimport pandas as pd\nimport csv\ndef get_apriori_input(input_file,output_file,sample_col=\"Sample\",gene_id_col=\"Gene_ID\"):\n df=pd.read_csv(input_file,sep=\"\\t\")\n sample_names=df[sample_col].unique()\n with open(output_file,\"w\") as out:\n csv_writer=csv.writer(out,delimiter=\"\\t\")\n for sample_name in sample_names:\n bool=df[sample_col]==sample_name\n df_sample=df[bool]\n gene_ids=df_sample[gene_id_col]\n gene_string=\",\".join(gene_ids)\n csv_writer.writerow([sample_name,gene_string])\n\n\nif __name__ == \"__main__\":\n import sys\n program,input_file,output_file,sample_col,gene_id_col=sys.argv\n get_apriori_input(input_file,output_file,sample_col,gene_id_col)\n",
"import pandas as pd\nimport csv\n\n\ndef get_apriori_input(input_file, output_file, sample_col='Sample',\n gene_id_col='Gene_ID'):\n df = pd.read_csv(input_file, sep='\\t')\n sample_names = df[sample_col].unique()\n with open(output_file, 'w') as out:\n csv_writer = csv.writer(out, delimiter='\\t')\n for sample_name in sample_names:\n bool = df[sample_col] == sample_name\n df_sample = df[bool]\n gene_ids = df_sample[gene_id_col]\n gene_string = ','.join(gene_ids)\n csv_writer.writerow([sample_name, gene_string])\n\n\nif __name__ == '__main__':\n import sys\n program, input_file, output_file, sample_col, gene_id_col = sys.argv\n get_apriori_input(input_file, output_file, sample_col, gene_id_col)\n",
"<import token>\n\n\ndef get_apriori_input(input_file, output_file, sample_col='Sample',\n gene_id_col='Gene_ID'):\n df = pd.read_csv(input_file, sep='\\t')\n sample_names = df[sample_col].unique()\n with open(output_file, 'w') as out:\n csv_writer = csv.writer(out, delimiter='\\t')\n for sample_name in sample_names:\n bool = df[sample_col] == sample_name\n df_sample = df[bool]\n gene_ids = df_sample[gene_id_col]\n gene_string = ','.join(gene_ids)\n csv_writer.writerow([sample_name, gene_string])\n\n\nif __name__ == '__main__':\n import sys\n program, input_file, output_file, sample_col, gene_id_col = sys.argv\n get_apriori_input(input_file, output_file, sample_col, gene_id_col)\n",
"<import token>\n\n\ndef get_apriori_input(input_file, output_file, sample_col='Sample',\n gene_id_col='Gene_ID'):\n df = pd.read_csv(input_file, sep='\\t')\n sample_names = df[sample_col].unique()\n with open(output_file, 'w') as out:\n csv_writer = csv.writer(out, delimiter='\\t')\n for sample_name in sample_names:\n bool = df[sample_col] == sample_name\n df_sample = df[bool]\n gene_ids = df_sample[gene_id_col]\n gene_string = ','.join(gene_ids)\n csv_writer.writerow([sample_name, gene_string])\n\n\n<code token>\n",
"<import token>\n<function token>\n<code token>\n"
] | false |
892 |
462d73195680118d19a3d4e8a855e65aaeecb3c6
|
import time
class DISTRICT:
def __init__(
self, cdcode, county, district, street, city, zipcode,
state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,
faxnumber, email, admfname, admlname, admemail, lat, long,
distrownercode, doctype, statustype, lastupdate):
self.cdcode = cdcode
self.county = county
self.district = district
self.street = street
self.city = city
self.zipcode = zipcode
self.state = state
self.mailstreet = mailstreet
self.mailcity = mailcity
self.mailzip = mailzip
self.mailstate = mailstate
self.phone = phone
self.extphone = extphone
self.faxnumber = faxnumber
self.email = email
self.admfname = admfname
self.admlname = admlname
self.admemail = admemail
self.lat = lat
self.long = long
self.distrownercode = distrownercode
self.doctype = doctype
self.statustype = statustype
self.lastupdate = lastupdate
def get_district_name(self):
print(self.district)
def get_district_cdcode(self):
print(self.cdcode)
def get_district_statustype(self):
print(self.statustype)
def start_end_timer():
print(time.perf_counter())
def read_text_file(strfile):
f = open(strfile, "r")
f.read()
def print_text_file(strfile):
f = open(strfile, "r")
print(f.read(3))
def load_text_file_to_class(strfile):
t = open("/home/student/Desktop/schooldata/copiedfile.txt", "w")
f = open(strfile, "r")
next(f)
for line in f:
d = []
d = line.split("\t")
# print(d)
# t.write(d)
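        # the 24 tab-separated columns map positionally onto the DISTRICT constructor arguments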
district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7], d[8], d[9], d[10], d[11],
d[12], d[13], d[14], d[15], d[16], d[17], d[18], d[19], d[20], d[21], d[22], d[23])
district.get_district_name()
district.get_district_cdcode()
district.get_district_statustype()
f.close()
t.close()
start_end_timer()
strfile = "/home/student/Desktop/schooldata/pubdistricts.txt"
load_text_file_to_class(strfile)
start_end_timer()
|
[
"import time\n\n\nclass DISTRICT:\n\n def __init__(\n self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\ndef read_text_file(strfile):\n f = open(strfile, \"r\")\n f.read()\n\n\ndef print_text_file(strfile):\n f = open(strfile, \"r\")\n print(f.read(3))\n\n\ndef load_text_file_to_class(strfile):\n t = open(\"/home/student/Desktop/schooldata/copiedfile.txt\", \"w\")\n f = open(strfile, \"r\")\n next(f)\n\n\n for line in f:\n d = []\n d = line.split(\"\\t\")\n # print(d)\n # t.write(d)\n district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7], d[8], d[9], d[10], d[11],\n d[12], d[13], d[14], d[15], d[16], d[17], d[18], d[19], d[20], d[21], d[22], d[23])\n district.get_district_name()\n district.get_district_cdcode()\n district.get_district_statustype()\n\n f.close()\n t.close()\n\n\nstart_end_timer()\nstrfile = \"/home/student/Desktop/schooldata/pubdistricts.txt\"\nload_text_file_to_class(strfile)\nstart_end_timer()\n\n",
"import time\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\ndef read_text_file(strfile):\n f = open(strfile, 'r')\n f.read()\n\n\ndef print_text_file(strfile):\n f = open(strfile, 'r')\n print(f.read(3))\n\n\ndef load_text_file_to_class(strfile):\n t = open('/home/student/Desktop/schooldata/copiedfile.txt', 'w')\n f = open(strfile, 'r')\n next(f)\n for line in f:\n d = []\n d = line.split('\\t')\n district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7],\n d[8], d[9], d[10], d[11], d[12], d[13], d[14], d[15], d[16], d[\n 17], d[18], d[19], d[20], d[21], d[22], d[23])\n district.get_district_name()\n district.get_district_cdcode()\n district.get_district_statustype()\n f.close()\n t.close()\n\n\nstart_end_timer()\nstrfile = '/home/student/Desktop/schooldata/pubdistricts.txt'\nload_text_file_to_class(strfile)\nstart_end_timer()\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\ndef read_text_file(strfile):\n f = open(strfile, 'r')\n f.read()\n\n\ndef print_text_file(strfile):\n f = open(strfile, 'r')\n print(f.read(3))\n\n\ndef load_text_file_to_class(strfile):\n t = open('/home/student/Desktop/schooldata/copiedfile.txt', 'w')\n f = open(strfile, 'r')\n next(f)\n for line in f:\n d = []\n d = line.split('\\t')\n district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7],\n d[8], d[9], d[10], d[11], d[12], d[13], d[14], d[15], d[16], d[\n 17], d[18], d[19], d[20], d[21], d[22], d[23])\n district.get_district_name()\n district.get_district_cdcode()\n district.get_district_statustype()\n f.close()\n t.close()\n\n\nstart_end_timer()\nstrfile = '/home/student/Desktop/schooldata/pubdistricts.txt'\nload_text_file_to_class(strfile)\nstart_end_timer()\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\ndef read_text_file(strfile):\n f = open(strfile, 'r')\n f.read()\n\n\ndef print_text_file(strfile):\n f = open(strfile, 'r')\n print(f.read(3))\n\n\ndef load_text_file_to_class(strfile):\n t = open('/home/student/Desktop/schooldata/copiedfile.txt', 'w')\n f = open(strfile, 'r')\n next(f)\n for line in f:\n d = []\n d = line.split('\\t')\n district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7],\n d[8], d[9], d[10], d[11], d[12], d[13], d[14], d[15], d[16], d[\n 17], d[18], d[19], d[20], d[21], d[22], d[23])\n district.get_district_name()\n district.get_district_cdcode()\n district.get_district_statustype()\n f.close()\n t.close()\n\n\nstart_end_timer()\n<assignment token>\nload_text_file_to_class(strfile)\nstart_end_timer()\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\ndef read_text_file(strfile):\n f = open(strfile, 'r')\n f.read()\n\n\ndef print_text_file(strfile):\n f = open(strfile, 'r')\n print(f.read(3))\n\n\ndef load_text_file_to_class(strfile):\n t = open('/home/student/Desktop/schooldata/copiedfile.txt', 'w')\n f = open(strfile, 'r')\n next(f)\n for line in f:\n d = []\n d = line.split('\\t')\n district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7],\n d[8], d[9], d[10], d[11], d[12], d[13], d[14], d[15], d[16], d[\n 17], d[18], d[19], d[20], d[21], d[22], d[23])\n district.get_district_name()\n district.get_district_cdcode()\n district.get_district_statustype()\n f.close()\n t.close()\n\n\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\n<function token>\n\n\ndef print_text_file(strfile):\n f = open(strfile, 'r')\n print(f.read(3))\n\n\ndef load_text_file_to_class(strfile):\n t = open('/home/student/Desktop/schooldata/copiedfile.txt', 'w')\n f = open(strfile, 'r')\n next(f)\n for line in f:\n d = []\n d = line.split('\\t')\n district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7],\n d[8], d[9], d[10], d[11], d[12], d[13], d[14], d[15], d[16], d[\n 17], d[18], d[19], d[20], d[21], d[22], d[23])\n district.get_district_name()\n district.get_district_cdcode()\n district.get_district_statustype()\n f.close()\n t.close()\n\n\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\n<function token>\n<function token>\n\n\ndef load_text_file_to_class(strfile):\n t = open('/home/student/Desktop/schooldata/copiedfile.txt', 'w')\n f = open(strfile, 'r')\n next(f)\n for line in f:\n d = []\n d = line.split('\\t')\n district = DISTRICT(d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7],\n d[8], d[9], d[10], d[11], d[12], d[13], d[14], d[15], d[16], d[\n 17], d[18], d[19], d[20], d[21], d[22], d[23])\n district.get_district_name()\n district.get_district_cdcode()\n district.get_district_statustype()\n f.close()\n t.close()\n\n\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\ndef start_end_timer():\n print(time.perf_counter())\n\n\n<function token>\n<function token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n\n def get_district_statustype(self):\n print(self.statustype)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n\n def __init__(self, cdcode, county, district, street, city, zipcode,\n state, mailstreet, mailcity, mailzip, mailstate, phone, extphone,\n faxnumber, email, admfname, admlname, admemail, lat, long,\n distrownercode, doctype, statustype, lastupdate):\n self.cdcode = cdcode\n self.county = county\n self.district = district\n self.street = street\n self.city = city\n self.zipcode = zipcode\n self.state = state\n self.mailstreet = mailstreet\n self.mailcity = mailcity\n self.mailzip = mailzip\n self.mailstate = mailstate\n self.phone = phone\n self.extphone = extphone\n self.faxnumber = faxnumber\n self.email = email\n self.admfname = admfname\n self.admlname = admlname\n self.admemail = admemail\n self.lat = lat\n self.long = long\n self.distrownercode = distrownercode\n self.doctype = doctype\n self.statustype = statustype\n self.lastupdate = lastupdate\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n <function token>\n\n def get_district_name(self):\n print(self.district)\n\n def get_district_cdcode(self):\n print(self.cdcode)\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n <function token>\n\n def get_district_name(self):\n print(self.district)\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\nclass DISTRICT:\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
893 |
e884825325ceb401142cab0618d9d4e70e475cf5
|
#!/usr/bin/env python
import sys, re
window = 2
for line in sys.stdin:
line = line.strip()
twits = line.split()
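    # emit "<word_i> <word_j>\t1" for every ordered pair of positions i != j on the line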
i = 0
while i <len(twits):
j = 0
while j <len(twits):
if i!= j:
print("%s%s\t%d" % (twits[i]+' ', twits[j], 1))
j+=1
i+=1
|
[
"#!/usr/bin/env python\n\nimport sys, re\n\nwindow = 2\n\nfor line in sys.stdin:\n line = line.strip()\n twits = line.split()\n i = 0\n while i <len(twits):\n j = 0\n while j <len(twits):\n if i!= j:\n print(\"%s%s\\t%d\" % (twits[i]+' ', twits[j], 1))\n j+=1\n i+=1",
"import sys, re\nwindow = 2\nfor line in sys.stdin:\n line = line.strip()\n twits = line.split()\n i = 0\n while i < len(twits):\n j = 0\n while j < len(twits):\n if i != j:\n print('%s%s\\t%d' % (twits[i] + ' ', twits[j], 1))\n j += 1\n i += 1\n",
"<import token>\nwindow = 2\nfor line in sys.stdin:\n line = line.strip()\n twits = line.split()\n i = 0\n while i < len(twits):\n j = 0\n while j < len(twits):\n if i != j:\n print('%s%s\\t%d' % (twits[i] + ' ', twits[j], 1))\n j += 1\n i += 1\n",
"<import token>\n<assignment token>\nfor line in sys.stdin:\n line = line.strip()\n twits = line.split()\n i = 0\n while i < len(twits):\n j = 0\n while j < len(twits):\n if i != j:\n print('%s%s\\t%d' % (twits[i] + ' ', twits[j], 1))\n j += 1\n i += 1\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
894 |
9d6b5baa8462b2996e4518dd39b5bb1efde1fd9d
|
# -*- coding: utf-8 -*-
# Enter your code here. Read input from STDIN. Print output to STDOUT
n= input()
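# the first input line holds the element count; it is consumed here but not used directly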
vals= list(map(int,input().split()))
def median(values):
n=len(values)
values = sorted(values)
if n%2==1:
return values[(n+1)//2 - 1]
else:
return int(sum(values[int((n/2)-1):int((n/2)+1)])/2)
def quartiles(values):
n=len(values)
values.sort()
Q2=median(values)
Q1=median(values[:int(n/2)])
#print ("values=",values)
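    # Q3 is the median of the upper half: the half starts at n/2 for even n and
    # at n/2 + 1 for odd n, so the middle element is excluded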
if n%2==0:
Q3=median(values[int(n/2):])
else:
Q3=median(values[int(n/2+1):])
return Q1,Q2,Q3
Q1,Q2,Q3=quartiles(vals)
print(Q1)
print(Q2)
print(Q3)
|
[
"# -*- coding: utf-8 -*-\r\n# Enter your code here. Read input from STDIN. Print output to STDOUT\r\n\r\nn= input()\r\nvals= list(map(int,input().split()))\r\n\r\ndef median(values):\r\n n=len(values)\r\n values = sorted(values)\r\n if n%2==1:\r\n return values[(n+1)//2 - 1]\r\n else:\r\n return int(sum(values[int((n/2)-1):int((n/2)+1)])/2)\r\n \r\ndef quartiles(values):\r\n n=len(values)\r\n values.sort()\r\n Q2=median(values)\r\n Q1=median(values[:int(n/2)])\r\n #print (\"values=\",values)\r\n\r\n if n%2==0:\r\n Q3=median(values[int(n/2):]) \r\n\r\n else:\r\n Q3=median(values[int(n/2+1):])\r\n \r\n return Q1,Q2,Q3\r\n\r\nQ1,Q2,Q3=quartiles(vals)\r\n\r\nprint(Q1)\r\nprint(Q2)\r\nprint(Q3)\r\n",
"n = input()\nvals = list(map(int, input().split()))\n\n\ndef median(values):\n n = len(values)\n values = sorted(values)\n if n % 2 == 1:\n return values[(n + 1) // 2 - 1]\n else:\n return int(sum(values[int(n / 2 - 1):int(n / 2 + 1)]) / 2)\n\n\ndef quartiles(values):\n n = len(values)\n values.sort()\n Q2 = median(values)\n Q1 = median(values[:int(n / 2)])\n if n % 2 == 0:\n Q3 = median(values[int(n / 2):])\n else:\n Q3 = median(values[int(n / 2 + 1):])\n return Q1, Q2, Q3\n\n\nQ1, Q2, Q3 = quartiles(vals)\nprint(Q1)\nprint(Q2)\nprint(Q3)\n",
"<assignment token>\n\n\ndef median(values):\n n = len(values)\n values = sorted(values)\n if n % 2 == 1:\n return values[(n + 1) // 2 - 1]\n else:\n return int(sum(values[int(n / 2 - 1):int(n / 2 + 1)]) / 2)\n\n\ndef quartiles(values):\n n = len(values)\n values.sort()\n Q2 = median(values)\n Q1 = median(values[:int(n / 2)])\n if n % 2 == 0:\n Q3 = median(values[int(n / 2):])\n else:\n Q3 = median(values[int(n / 2 + 1):])\n return Q1, Q2, Q3\n\n\n<assignment token>\nprint(Q1)\nprint(Q2)\nprint(Q3)\n",
"<assignment token>\n\n\ndef median(values):\n n = len(values)\n values = sorted(values)\n if n % 2 == 1:\n return values[(n + 1) // 2 - 1]\n else:\n return int(sum(values[int(n / 2 - 1):int(n / 2 + 1)]) / 2)\n\n\ndef quartiles(values):\n n = len(values)\n values.sort()\n Q2 = median(values)\n Q1 = median(values[:int(n / 2)])\n if n % 2 == 0:\n Q3 = median(values[int(n / 2):])\n else:\n Q3 = median(values[int(n / 2 + 1):])\n return Q1, Q2, Q3\n\n\n<assignment token>\n<code token>\n",
"<assignment token>\n\n\ndef median(values):\n n = len(values)\n values = sorted(values)\n if n % 2 == 1:\n return values[(n + 1) // 2 - 1]\n else:\n return int(sum(values[int(n / 2 - 1):int(n / 2 + 1)]) / 2)\n\n\n<function token>\n<assignment token>\n<code token>\n",
"<assignment token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
895 |
624b34d160ea6db4f5249544f1614a20f506ca9e
|
import PySimpleGUI as sg
class TelaLisatrClientes():
def __init__(self):
self.__window = None
def init_components(self, lista_clientes):
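        # layout: a caption, a 60x10 listbox holding the client entries, and a Submit button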
layout = [
[sg.Text('Dados do cliente')],
[sg.Listbox(values=lista_clientes, size=(60, 10))],
[sg.Submit()]
]
self.__window = sg.Window('Lista de clientes').Layout(layout)
def lista_clientes(self, lista_clientes):
self.init_components(lista_clientes)
button, values = self.__window.Read()
self.__window.Close()
return button, values
|
[
"import PySimpleGUI as sg\n\nclass TelaLisatrClientes():\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n\n layout = [\n [sg.Text('Dados do cliente')],\n [sg.Listbox(values=lista_clientes, size=(60, 10))],\n [sg.Submit()]\n ]\n\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n\n self.init_components(lista_clientes)\n\n button, values = self.__window.Read()\n\n self.__window.Close()\n\n return button, values\n\n",
"import PySimpleGUI as sg\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"<import token>\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"<import token>\n\n\nclass TelaLisatrClientes:\n <function token>\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"<import token>\n\n\nclass TelaLisatrClientes:\n <function token>\n <function token>\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"<import token>\n\n\nclass TelaLisatrClientes:\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
896 |
d9156c20e046f608563bc6779575e14cc60f4c25
|
from django.core.urlresolvers import reverse
from keptar import settings
import os, os.path
import Image
try:
from collections import OrderedDict
except ImportError:
from keptar.odict import OrderedDict
class AccessDenied(Exception):
pass
class FileNotFound(Exception):
pass
class NotDirectory(Exception):
pass
def enrich(filelist, relpath='', thumbnails=True):
    """Adds the necessary data to the image names"""
files = OrderedDict()
for f in filelist:
abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, relpath, f))
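        # directories get a static icon and a link to the listing view;
        # files get a thumbnail (or generic icon) and a direct media URL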
if os.path.isdir(abspath):
thumb = settings.KEPTAR_ICONS.get('dir', None)
url = reverse('keptar.views.listdir', args=[os.path.join(relpath, f)])
direct_url = None
type = 'dir'
else:
if thumbnails:
try:
thumb = get_thumbnail(abspath)
except:
thumb = None
else:
thumb = settings.KEPTAR_ICONS.get('file', None)
url = reverse('keptar.views.showfile', args=[os.path.join(relpath, f)])
direct_url = getattr(settings, 'KEPTAR_URL', '/media/')+relpath+f
type = 'file'
        # TODO: additional data
files[f] = {
'relpath': relpath,
'url': url,
'abspath': abspath,
'thumb': thumb,
'type': type,
'direct_url': direct_url,
}
return files
def get_parent(path):
    """Returns the parent directory of the given element"""
# security check
parent = os.path.dirname(path)
try:
get_abspath(parent)
except:
parent = ''
return parent
def get_abspath(path):
    """Raises an AccessDenied exception if someone tries to cheat"""
abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))
    # is the target directory really inside the root? - needed because of /../...
if not abspath.startswith(settings.KEPTAR_ROOT):
raise AccessDenied("%s < %s" % (abspath, settings.KEPTAR_ROOT))
return abspath
def get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', False), thumbnails=True):
    """Returns the list of directories and files in the ``path`` directory.
    ``path`` is relative to ``settings.KEPTAR_ROOT``.
    The directories and files are returned in a single dict,
    with various extra parameters for each entry.
    The supported extensions can be configured via
    ``settings.KEPTAR_EXTENSIONS``.
    """
abspath = get_abspath(path)
if not os.path.isdir(abspath):
raise NotDirectory(abspath)
dirs = []
pictures = []
for fname in os.listdir(abspath):
file = os.path.join(abspath, fname)
if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):
dirs.append(fname)
if os.path.isfile(file):
            # is the extension supported?
ext = file[file.rfind('.')+1:]
if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or not fname.startswith('.')):
pictures.append(fname)
dirs.sort()
pictures.sort()
return enrich(dirs+pictures, relpath=path)
def get_thumbnail(file, type='', regenerate=False):
    """Returns the thumbnail belonging to ``file``, generating it first if it
    does not exist yet.
    ``type`` specifies which kind of thumbnail is needed; the parameters of
    the thumbnail types can be configured in ``settings.py``.
    If ``regenerate`` is ``True``, the thumbnail is regenerated.
    """
ext = file[file.rfind('.')+1:]
if not os.path.isfile(file) or ext.lower() not in settings.KEPTAR_EXTENSIONS:
raise FileNotFound(file)
basename = os.path.basename(file)
dirname = os.path.dirname(file)
thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'], basename)
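    # thumbnails live in a per-type subdirectory next to the original file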
if regenerate or not os.path.isfile(thumbname):
if not os.path.isdir(os.path.dirname(thumbname)):
os.mkdir(os.path.dirname(thumbname))
generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type]['size'])
thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(settings.KEPTAR_ROOT):]
return thumburl
def generate_thumbnail(file, thumbname, size):
image = Image.open(file)
image.thumbnail(size)
image.save(thumbname, image.format)
|
[
"from django.core.urlresolvers import reverse\nfrom keptar import settings\nimport os, os.path\nimport Image\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from keptar.odict import OrderedDict\n\nclass AccessDenied(Exception):\n pass\n\nclass FileNotFound(Exception):\n pass\n\nclass NotDirectory(Exception):\n pass\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n\n files = OrderedDict()\n\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/')+relpath+f\n type = 'file'\n\n # TODO: egyeb adatok\n files[f] = {\n 'relpath': relpath,\n 'url': url,\n 'abspath': abspath,\n 'thumb': thumb,\n 'type': type,\n 'direct_url': direct_url,\n }\n\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n\n # security check\n parent = os.path.dirname(path)\n\n try:\n get_abspath(parent)\n except:\n parent = ''\n\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n # vajon a celkonyvtar valoban a root-on belul talalhato? - /../... miatt\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied(\"%s < %s\" % (abspath, settings.KEPTAR_ROOT))\n \n return abspath\n\n\ndef get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', False), thumbnails=True):\n \"\"\"Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.\n A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.\n A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza, \n mindenfele extra parameterrel.\n A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott \n kiterjesztesek.\n \"\"\"\n\n abspath = get_abspath(path)\n\n if not os.path.isdir(abspath):\n raise NotDirectory(abspath)\n\n dirs = []\n pictures = []\n\n for fname in os.listdir(abspath):\n file = os.path.join(abspath, fname)\n if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):\n dirs.append(fname)\n if os.path.isfile(file):\n # a kiterjesztes tamogatott-e\n ext = file[file.rfind('.')+1:]\n if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or not fname.startswith('.')):\n pictures.append(fname)\n\n dirs.sort()\n pictures.sort()\n\n return enrich(dirs+pictures, relpath=path)\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n\n ext = file[file.rfind('.')+1:]\n if not os.path.isfile(file) or ext.lower() not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n \n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'], basename)\n if regenerate 
or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type]['size'])\n \n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(settings.KEPTAR_ROOT):]\n\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n\n",
"from django.core.urlresolvers import reverse\nfrom keptar import settings\nimport os, os.path\nimport Image\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from keptar.odict import OrderedDict\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\ndef get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', \n False), thumbnails=True):\n \"\"\"Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.\n A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.\n A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza, \n mindenfele extra parameterrel.\n A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott \n kiterjesztesek.\n \"\"\"\n abspath = get_abspath(path)\n if not os.path.isdir(abspath):\n raise NotDirectory(abspath)\n dirs = []\n pictures = []\n for fname in os.listdir(abspath):\n file = os.path.join(abspath, fname)\n if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):\n dirs.append(fname)\n if os.path.isfile(file):\n ext = file[file.rfind('.') + 1:]\n if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or\n not fname.startswith('.')):\n pictures.append(fname)\n dirs.sort()\n pictures.sort()\n return enrich(dirs + pictures, relpath=path)\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, 
settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"<import token>\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from keptar.odict import OrderedDict\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\ndef get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', \n False), thumbnails=True):\n \"\"\"Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.\n A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.\n A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza, \n mindenfele extra parameterrel.\n A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott \n kiterjesztesek.\n \"\"\"\n abspath = get_abspath(path)\n if not os.path.isdir(abspath):\n raise NotDirectory(abspath)\n dirs = []\n pictures = []\n for fname in os.listdir(abspath):\n file = os.path.join(abspath, fname)\n if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):\n dirs.append(fname)\n if os.path.isfile(file):\n ext = file[file.rfind('.') + 1:]\n if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or\n not fname.startswith('.')):\n pictures.append(fname)\n dirs.sort()\n pictures.sort()\n return enrich(dirs + pictures, relpath=path)\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', 
'/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"<import token>\n<code token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\ndef get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', \n False), thumbnails=True):\n \"\"\"Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.\n A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.\n A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza, \n mindenfele extra parameterrel.\n A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott \n kiterjesztesek.\n \"\"\"\n abspath = get_abspath(path)\n if not os.path.isdir(abspath):\n raise NotDirectory(abspath)\n dirs = []\n pictures = []\n for fname in os.listdir(abspath):\n file = os.path.join(abspath, fname)\n if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):\n dirs.append(fname)\n if os.path.isfile(file):\n ext = file[file.rfind('.') + 1:]\n if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or\n not fname.startswith('.')):\n pictures.append(fname)\n dirs.sort()\n pictures.sort()\n return enrich(dirs + pictures, relpath=path)\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\ndef 
generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"<import token>\n<code token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\n<function token>\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"<import token>\n<code token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\n<function token>\n<function token>\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"<import token>\n<code token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"<import token>\n<code token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\n<function token>\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"<import token>\n<code token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\n<function token>\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<code token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<code token>\n<class token>\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<code token>\n<class token>\n<class token>\n\n\nclass NotDirectory(Exception):\n pass\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<code token>\n<class token>\n<class token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
897 |
5193de15052f81460a23d993cfa039fa90c9de5e
|
"""
Copyright (C) 2014, Jill Huchital
"""
# test comment
from flask import Flask
from flask import render_template
from flask import jsonify
from flask import request
from playlists import get_all_playlists, create_playlists, get_all_categories, add_new_category, add_new_topic, get_all_topics
from db import connect_to_db
ALL_DBS = None
app = Flask(__name__)
@app.route('/')
def index():
# return render_template('index.html', greeting='here we are then')
return "index"
@app.route('/hello/')
def hello():
return render_template('index.html', greeting='here we are')
@app.route('/tools/')
def tools():
return render_template('tools.html')
@app.route('/api/1.0/create_playlists', methods = ['POST'])
def do_create_playlists():
create_playlists(ALL_DBS)
retval = get_all_playlists(ALL_DBS)
return jsonify({'all_playlists': retval})
@app.route('/api/1.0/get_playlists', methods = ['POST'])
def get_playlists():
retval = get_all_playlists(ALL_DBS)
return jsonify({'all_playlists': retval})
@app.route('/api/1.0/get_all_categories', methods = ['POST'])
def get_categories():
retval = get_all_categories(ALL_DBS)
return jsonify({'all_categories': retval})
@app.route('/api/1.0/get_all_topics', methods = ['POST'])
def get_topics():
retval = get_all_topics(ALL_DBS)
return jsonify({'all_topics': retval})
@app.route('/api/1.0/add_category', methods = ['POST'])
def add_category():
retval = add_new_category(request.json, ALL_DBS)
return retval
@app.route('/api/1.0/add_topic', methods = ['POST'])
def add_topic():
retval = add_new_topic(request.json, ALL_DBS)
return jsonify({'return_code': retval})
@app.route('/api/1.0/<string:api_call>', methods = ['POST'])
def generic_api_call(api_call):
if not request.json:
abort(400)
param1 = request.json.get('param1', 'no param 1')
param2 = request.json.get('param2', 'no param 2')
retval = {'param_1': param1,
'api_call': api_call,
'param_2': param2}
return jsonify(retval)
if __name__ == '__main__':
# debug = True makes the server restart when the Python files change. TODO: make it
# depend on whether we're running locally or in production.
ALL_DBS = connect_to_db()
# create_playlists(ALL_DBS)
app.run(debug = True)
|
[
"\"\"\"\nCopyright (C) 2014, Jill Huchital\n\"\"\"\n\n# test comment\n\nfrom flask import Flask\nfrom flask import render_template\nfrom flask import jsonify\nfrom flask import request\n\nfrom playlists import get_all_playlists, create_playlists, get_all_categories, add_new_category, add_new_topic, get_all_topics\nfrom db import connect_to_db\n\nALL_DBS = None\n\napp = Flask(__name__)\n\[email protected]('/')\ndef index():\n # return render_template('index.html', greeting='here we are then')\n return \"index\"\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\[email protected]('/api/1.0/create_playlists', methods = ['POST'])\ndef do_create_playlists():\n create_playlists(ALL_DBS)\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\[email protected]('/api/1.0/get_playlists', methods = ['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\[email protected]('/api/1.0/get_all_categories', methods = ['POST'])\ndef get_categories():\n retval = get_all_categories(ALL_DBS)\n return jsonify({'all_categories': retval})\n\[email protected]('/api/1.0/get_all_topics', methods = ['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\[email protected]('/api/1.0/add_category', methods = ['POST'])\ndef add_category():\n retval = add_new_category(request.json, ALL_DBS)\n return retval\n\[email protected]('/api/1.0/add_topic', methods = ['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\[email protected]('/api/1.0/<string:api_call>', methods = ['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1,\n 'api_call': api_call,\n 'param_2': param2}\n return jsonify(retval)\n\nif __name__ == '__main__':\n # debug = True makes the server restart when the Python files change. TODO: make it\n # depend on whether we're running locally or in production.\n ALL_DBS = connect_to_db()\n # create_playlists(ALL_DBS)\n app.run(debug = True)\n",
"<docstring token>\nfrom flask import Flask\nfrom flask import render_template\nfrom flask import jsonify\nfrom flask import request\nfrom playlists import get_all_playlists, create_playlists, get_all_categories, add_new_category, add_new_topic, get_all_topics\nfrom db import connect_to_db\nALL_DBS = None\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\[email protected]('/api/1.0/create_playlists', methods=['POST'])\ndef do_create_playlists():\n create_playlists(ALL_DBS)\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_playlists', methods=['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_all_categories', methods=['POST'])\ndef get_categories():\n retval = get_all_categories(ALL_DBS)\n return jsonify({'all_categories': retval})\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\[email protected]('/api/1.0/add_category', methods=['POST'])\ndef add_category():\n retval = add_new_category(request.json, ALL_DBS)\n return retval\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\nif __name__ == '__main__':\n ALL_DBS = connect_to_db()\n app.run(debug=True)\n",
"<docstring token>\n<import token>\nALL_DBS = None\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\[email protected]('/api/1.0/create_playlists', methods=['POST'])\ndef do_create_playlists():\n create_playlists(ALL_DBS)\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_playlists', methods=['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_all_categories', methods=['POST'])\ndef get_categories():\n retval = get_all_categories(ALL_DBS)\n return jsonify({'all_categories': retval})\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\[email protected]('/api/1.0/add_category', methods=['POST'])\ndef add_category():\n retval = add_new_category(request.json, ALL_DBS)\n return retval\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\nif __name__ == '__main__':\n ALL_DBS = connect_to_db()\n app.run(debug=True)\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\[email protected]('/api/1.0/create_playlists', methods=['POST'])\ndef do_create_playlists():\n create_playlists(ALL_DBS)\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_playlists', methods=['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_all_categories', methods=['POST'])\ndef get_categories():\n retval = get_all_categories(ALL_DBS)\n return jsonify({'all_categories': retval})\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\[email protected]('/api/1.0/add_category', methods=['POST'])\ndef add_category():\n retval = add_new_category(request.json, ALL_DBS)\n return retval\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\nif __name__ == '__main__':\n ALL_DBS = connect_to_db()\n app.run(debug=True)\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\[email protected]('/api/1.0/create_playlists', methods=['POST'])\ndef do_create_playlists():\n create_playlists(ALL_DBS)\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_playlists', methods=['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_all_categories', methods=['POST'])\ndef get_categories():\n retval = get_all_categories(ALL_DBS)\n return jsonify({'all_categories': retval})\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\[email protected]('/api/1.0/add_category', methods=['POST'])\ndef add_category():\n retval = add_new_category(request.json, ALL_DBS)\n return retval\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\n<function token>\n\n\[email protected]('/api/1.0/get_playlists', methods=['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\[email protected]('/api/1.0/get_all_categories', methods=['POST'])\ndef get_categories():\n retval = get_all_categories(ALL_DBS)\n return jsonify({'all_categories': retval})\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\[email protected]('/api/1.0/add_category', methods=['POST'])\ndef add_category():\n retval = add_new_category(request.json, ALL_DBS)\n return retval\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\n<function token>\n\n\[email protected]('/api/1.0/get_playlists', methods=['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\n<function token>\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\[email protected]('/api/1.0/add_category', methods=['POST'])\ndef add_category():\n retval = add_new_category(request.json, ALL_DBS)\n return retval\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\n<function token>\n\n\[email protected]('/api/1.0/get_playlists', methods=['POST'])\ndef get_playlists():\n retval = get_all_playlists(ALL_DBS)\n return jsonify({'all_playlists': retval})\n\n\n<function token>\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\n<function token>\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\[email protected]('/')\ndef index():\n return 'index'\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\n<function token>\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\[email protected]('/tools/')\ndef tools():\n return render_template('tools.html')\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\n<function token>\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\n<function token>\n\n\[email protected]('/api/1.0/add_topic', methods=['POST'])\ndef add_topic():\n retval = add_new_topic(request.json, ALL_DBS)\n return jsonify({'return_code': retval})\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/1.0/get_all_topics', methods=['POST'])\ndef get_topics():\n retval = get_all_topics(ALL_DBS)\n return jsonify({'all_topics': retval})\n\n\n<function token>\n<function token>\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n\n\[email protected]('/hello/')\ndef hello():\n return render_template('index.html', greeting='here we are')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/1.0/<string:api_call>', methods=['POST'])\ndef generic_api_call(api_call):\n if not request.json:\n abort(400)\n param1 = request.json.get('param1', 'no param 1')\n param2 = request.json.get('param2', 'no param 2')\n retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2}\n return jsonify(retval)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
898 |
acbe9a9501c6a8532249496f327c2470c1d2f8e0
|
import math
import backtrader as bt
from datetime import datetime
from bots.TelegramBot import TelegramBot
import logging

class Volume(bt.Strategy):
    params = (('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25))

    def __init__(self):
        self.mysignal = (self.data.volume / bt.ind.Average(self.data.volume, period=self.params.avg_volume_period)) >= self.params.ratio

    def next(self):
        self.step_date = self.data.datetime.date().strftime("%Y-%m-%d")
        self.today = datetime.now().strftime("%Y-%m-%d")
        if self.mysignal and self.step_date == self.today:
            TelegramBot.send("{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.".format(self.params.ticker, self.params.avg_volume_period))
[
"import math\nimport backtrader as bt\nfrom datetime import datetime\nfrom bots.TelegramBot import TelegramBot\nimport logging\nclass Volume(bt.Strategy):\n params = (('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25))\n\n def __init__(self):\n self.mysignal = (self.data.volume / bt.ind.Average(self.data.volume, period=self.params.avg_volume_period)) >= self.params.ratio\n def next(self):\n self.step_date = self.data.datetime.date().strftime(\"%Y-%m-%d\")\n self.today = datetime.now().strftime(\"%Y-%m-%d\")\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\"{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.\".format(self.params.ticker, self.params.avg_volume_period))\n ",
"import math\nimport backtrader as bt\nfrom datetime import datetime\nfrom bots.TelegramBot import TelegramBot\nimport logging\n\n\nclass Volume(bt.Strategy):\n params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"<import token>\n\n\nclass Volume(bt.Strategy):\n params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"<import token>\n\n\nclass Volume(bt.Strategy):\n <assignment token>\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"<import token>\n\n\nclass Volume(bt.Strategy):\n <assignment token>\n <function token>\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"<import token>\n\n\nclass Volume(bt.Strategy):\n <assignment token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
899 |
e37f4422c1063df50453f7abf72a0a9a31156d8b
|
from locations.storefinders.stockinstore import StockInStoreSpider


class ScooterHutAUSpider(StockInStoreSpider):
    name = "scooter_hut_au"
    item_attributes = {"brand": "Scooter Hut", "brand_wikidata": "Q117747623"}
    api_site_id = "10112"
    api_widget_id = "119"
    api_widget_type = "product"
    api_origin = "https://scooterhut.com.au"
[
"from locations.storefinders.stockinstore import StockInStoreSpider\n\n\nclass ScooterHutAUSpider(StockInStoreSpider):\n name = \"scooter_hut_au\"\n item_attributes = {\"brand\": \"Scooter Hut\", \"brand_wikidata\": \"Q117747623\"}\n api_site_id = \"10112\"\n api_widget_id = \"119\"\n api_widget_type = \"product\"\n api_origin = \"https://scooterhut.com.au\"\n",
"from locations.storefinders.stockinstore import StockInStoreSpider\n\n\nclass ScooterHutAUSpider(StockInStoreSpider):\n name = 'scooter_hut_au'\n item_attributes = {'brand': 'Scooter Hut', 'brand_wikidata': 'Q117747623'}\n api_site_id = '10112'\n api_widget_id = '119'\n api_widget_type = 'product'\n api_origin = 'https://scooterhut.com.au'\n",
"<import token>\n\n\nclass ScooterHutAUSpider(StockInStoreSpider):\n name = 'scooter_hut_au'\n item_attributes = {'brand': 'Scooter Hut', 'brand_wikidata': 'Q117747623'}\n api_site_id = '10112'\n api_widget_id = '119'\n api_widget_type = 'product'\n api_origin = 'https://scooterhut.com.au'\n",
"<import token>\n\n\nclass ScooterHutAUSpider(StockInStoreSpider):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |